diff --git a/.eslintignore b/.eslintignore
deleted file mode 100644
index 728adc67dc8727..00000000000000
--- a/.eslintignore
+++ /dev/null
@@ -1,9 +0,0 @@
-**/dist/**/*
-**/vendor/**/*
-**/tests/**/fixtures/**/*
-!.github
-*.d.ts
-config/chartcuterie/config.js
-fixtures/profiles/embedded.js
-fixtures/artifact_bundle_debug_ids/**/*
-fixtures/artifact_bundle_duplicated_debug_ids/**/*
diff --git a/.eslintrc.js b/.eslintrc.js
deleted file mode 100644
index dfb3482ca6aa79..00000000000000
--- a/.eslintrc.js
+++ /dev/null
@@ -1,976 +0,0 @@
-/* eslint-env node */
-
-const detectDeprecations = !!process.env.SENTRY_DETECT_DEPRECATIONS;
-
-const baseRules = {
- /**
- * Strict mode
- */
- // https://eslint.org/docs/rules/strict
- strict: ['error', 'global'],
-
- /**
- * Variables
- */
- // https://eslint.org/docs/rules/no-shadow-restricted-names
- 'no-shadow-restricted-names': ['error'],
-
- /**
- * Possible errors
- */
- // https://eslint.org/docs/rules/no-cond-assign
- 'no-cond-assign': ['error', 'always'],
-
- // https://eslint.org/docs/rules/no-alert
- 'no-alert': ['error'],
-
- // https://eslint.org/docs/rules/no-constant-condition
- 'no-constant-condition': ['warn'],
-
- // https://eslint.org/docs/rules/no-empty
- 'no-empty': ['error'],
-
- // https://eslint.org/docs/rules/no-ex-assign
- 'no-ex-assign': ['error'],
-
- // https://eslint.org/docs/rules/no-extra-boolean-cast
- 'no-extra-boolean-cast': ['error'],
-
- // https://eslint.org/docs/rules/no-func-assign
- 'no-func-assign': ['error'],
-
- // https://eslint.org/docs/rules/no-inner-declarations
- 'no-inner-declarations': ['error'],
-
- // https://eslint.org/docs/rules/no-invalid-regexp
- 'no-invalid-regexp': ['error'],
-
- // https://eslint.org/docs/rules/no-irregular-whitespace
- 'no-irregular-whitespace': ['error'],
-
- // https://eslint.org/docs/rules/no-obj-calls
- 'no-obj-calls': ['error'],
-
- // https://eslint.org/docs/rules/no-sparse-arrays
- 'no-sparse-arrays': ['error'],
-
- // https://eslint.org/docs/rules/block-scoped-var
- 'block-scoped-var': ['error'],
-
- /**
- * Best practices
- */
- // https://eslint.org/docs/rules/consistent-return
- 'consistent-return': ['error'],
-
- // https://eslint.org/docs/rules/default-case
- 'default-case': ['error'],
-
- // https://eslint.org/docs/rules/dot-notation
- 'dot-notation': [
- 'error',
- {
- allowKeywords: true,
- },
- ],
-
- // https://eslint.org/docs/rules/guard-for-in [REVISIT ME]
- 'guard-for-in': ['off'],
-
- // https://eslint.org/docs/rules/no-caller
- 'no-caller': ['error'],
-
- // https://eslint.org/docs/rules/no-eval
- 'no-eval': ['error'],
-
- // https://eslint.org/docs/rules/no-extend-native
- 'no-extend-native': ['error'],
-
- // https://eslint.org/docs/rules/no-extra-bind
- 'no-extra-bind': ['error'],
-
- // https://eslint.org/docs/rules/no-fallthrough
- 'no-fallthrough': ['error'],
-
- // https://eslint.org/docs/rules/no-floating-decimal
- 'no-floating-decimal': ['error'],
-
- // https://eslint.org/docs/rules/no-implied-eval
- 'no-implied-eval': ['error'],
-
- // https://eslint.org/docs/rules/no-lone-blocks
- 'no-lone-blocks': ['error'],
-
- // https://eslint.org/docs/rules/no-loop-func
- 'no-loop-func': ['error'],
-
- // https://eslint.org/docs/rules/no-multi-str
- 'no-multi-str': ['error'],
-
- // https://eslint.org/docs/rules/no-native-reassign
- 'no-native-reassign': ['error'],
-
- // https://eslint.org/docs/rules/no-new
- 'no-new': ['error'],
-
- // https://eslint.org/docs/rules/no-new-func
- 'no-new-func': ['error'],
-
- // https://eslint.org/docs/rules/no-new-wrappers
- 'no-new-wrappers': ['error'],
-
- // https://eslint.org/docs/rules/no-octal
- 'no-octal': ['error'],
-
- // https://eslint.org/docs/rules/no-octal-escape
- 'no-octal-escape': ['error'],
-
- // https://eslint.org/docs/rules/no-param-reassign [REVISIT ME]
- 'no-param-reassign': ['off'],
-
- // https://eslint.org/docs/rules/no-proto
- 'no-proto': ['error'],
-
- // https://eslint.org/docs/rules/no-return-assign
- 'no-return-assign': ['error'],
-
- // https://eslint.org/docs/rules/no-script-url
- 'no-script-url': ['error'],
-
- // https://eslint.org/docs/rules/no-self-compare
- 'no-self-compare': ['error'],
-
- // https://eslint.org/docs/rules/no-sequences
- 'no-sequences': ['error'],
-
- // https://eslint.org/docs/rules/no-throw-literal
- 'no-throw-literal': ['error'],
-
- // https://eslint.org/docs/rules/no-with
- 'no-with': ['error'],
-
- // https://eslint.org/docs/rules/radix
- radix: ['error'],
-
- // https://eslint.org/docs/rules/space-in-brackets.html
- 'computed-property-spacing': ['error', 'never'],
-
- // https://eslint.org/docs/rules/space-in-brackets.html
- 'array-bracket-spacing': ['error', 'never'],
-
- // https://eslint.org/docs/rules/space-in-brackets.html
- 'object-curly-spacing': ['error', 'never'],
-
- // https://eslint.org/docs/rules/object-shorthand
- 'object-shorthand': ['error', 'properties'],
-
- // https://eslint.org/docs/rules/space-infix-ops.html
- 'space-infix-ops': ['error'],
-
- // https://eslint.org/docs/rules/vars-on-top
- 'vars-on-top': ['off'],
-
- // https://eslint.org/docs/rules/wrap-iife
- 'wrap-iife': ['error', 'any'],
-
- // https://eslint.org/docs/rules/array-callback-return
- 'array-callback-return': ['error'],
-
- // https://eslint.org/docs/rules/yoda
- yoda: ['error'],
-
- // https://eslint.org/docs/rules/no-else-return
- 'no-else-return': ['error', {allowElseIf: false}],
-
- // https://eslint.org/docs/rules/require-await
- 'require-await': ['error'],
-
- // https://eslint.org/docs/rules/multiline-comment-style
- 'multiline-comment-style': ['error', 'separate-lines'],
-
- // https://eslint.org/docs/rules/spaced-comment
- 'spaced-comment': [
- 'error',
- 'always',
- {
- line: {markers: ['/'], exceptions: ['-', '+']},
- block: {exceptions: ['*'], balanced: true},
- },
- ],
-};
-
-const reactReactRules = {
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/display-name.md
- 'react/display-name': ['off'],
-
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-multi-comp.md
- 'react/no-multi-comp': [
- 'off',
- {
- ignoreStateless: true,
- },
- ],
-
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-fragments.md
- 'react/jsx-fragments': ['error', 'element'],
-
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-handler-names.md
- // Ensures that any component or prop methods used to handle events are correctly prefixed.
- 'react/jsx-handler-names': [
- 'off',
- {
- eventHandlerPrefix: 'handle',
- eventHandlerPropPrefix: 'on',
- },
- ],
-
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-key.md
- 'react/jsx-key': ['error'],
-
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-undef.md
- 'react/jsx-no-undef': ['error'],
-
- // Disabled as we use the newer JSX transform babel plugin.
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-uses-react.md
- 'react/jsx-uses-react': ['off'],
-
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-uses-vars.md
- 'react/jsx-uses-vars': ['error'],
-
- /**
- * Deprecation related rules
- */
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-deprecated.md
- 'react/no-deprecated': ['error'],
-
- // Prevent usage of the return value of React.render
- // deprecation: https://facebook.github.io/react/docs/react-dom.html#render
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-render-return-value.md
- 'react/no-render-return-value': ['error'],
-
- // Children should always be actual children, not passed in as a prop.
- // When using JSX, the children should be nested between the opening and closing tags. When not using JSX, the children should be passed as additional arguments to React.createElement.
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-children-prop.md
- 'react/no-children-prop': ['error'],
-
- // This rule helps prevent problems caused by using children and the dangerouslySetInnerHTML prop at the same time.
- // React will throw a warning if this rule is ignored.
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-danger-with-children.md
- 'react/no-danger-with-children': ['error'],
-
- // Prevent direct mutation of this.state
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-direct-mutation-state.md
- 'react/no-direct-mutation-state': ['error'],
-
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-did-mount-set-state.md
- 'react/no-did-mount-set-state': ['error'],
-
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-did-update-set-state.md"
- 'react/no-did-update-set-state': ['error'],
-
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-redundant-should-component-update.md
- 'react/no-redundant-should-component-update': ['error'],
-
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-typos.md
- 'react/no-typos': ['error'],
-
- // Prevent invalid characters from appearing in markup
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-unescaped-entities.md
- 'react/no-unescaped-entities': ['off'],
-
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-unknown-property.md
- 'react/no-unknown-property': ['error', {ignore: ['css']}],
-
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-unused-prop-types.md
- // Disabled since this currently fails to correctly detect a lot of
- // typescript prop type usage.
- 'react/no-unused-prop-types': ['off'],
-
- // We do not need proptypes since we're using typescript
- 'react/prop-types': ['off'],
-
- // When writing the render method in a component it is easy to forget to return the JSX content.
- // This rule will warn if the return statement is missing.
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/require-render-return.md
- 'react/require-render-return': ['error'],
-
- // Disabled as we are using the newer JSX transform babel plugin.
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/react-in-jsx-scope.md
- 'react/react-in-jsx-scope': ['off'],
-
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/self-closing-comp.md
- 'react/self-closing-comp': ['error'],
-
- // This also causes issues with typescript
- // See: https://github.com/yannickcr/eslint-plugin-react/issues/2066
- //
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/sort-comp.md
- 'react/sort-comp': ['warn'],
-
- // Disabled because of prettier
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/wrap-multilines.md
- 'react/jsx-wrap-multilines': ['off'],
-
- // Consistent (never add ={true})
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-boolean-value.md
- 'react/jsx-boolean-value': ['error', 'never'],
-
- // Consistent function component declaration styles
- // https://github.com/jsx-eslint/eslint-plugin-react/blob/master/docs/rules/function-component-definition.md
- 'react/function-component-definition': [
- 'error',
- {namedComponents: 'function-declaration'},
- ],
-};
-
-const reactImportRules = {
- // Not recommended to be enabled with typescript-eslint
- // https://typescript-eslint.io/linting/troubleshooting/performance-troubleshooting/#eslint-plugin-import
- 'import/no-unresolved': ['off'],
- 'import/named': ['off'],
- 'import/default': ['off'],
- 'import/export': ['off'],
- 'import/no-named-as-default-member': ['off'],
-
- // Redflags
- // do not allow a default import name to match a named export (airbnb: error)
- // Issue with `DefaultIssuePlugin` and `app/plugins/index`
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-named-as-default.md
- 'import/no-named-as-default': ['off'],
-
- // disallow use of jsdoc-marked-deprecated imports
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-deprecated.md
- 'import/no-deprecated': ['off'],
-
- // Forbid mutable exports (airbnb: error)
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-mutable-exports.md
- // TODO: enable?
- 'import/no-mutable-exports': ['off'],
-
- // disallow require()
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-commonjs.md
- 'import/no-commonjs': ['off'],
-
- // disallow AMD require/define
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-amd.md
- 'import/no-amd': ['error'],
-
- // disallow duplicate imports
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-duplicates.md
- 'import/no-duplicates': ['error'],
-
- // disallow namespace imports
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-namespace.md
- 'import/no-namespace': ['off'],
-
- // Ensure consistent use of file extension within the import path
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/extensions.md
- // TODO this fucks up getsentry
- 'import/extensions': [
- 'off',
- 'always',
- {
- js: 'never',
- jsx: 'never',
- },
- ],
-
- // Require a newline after the last import/require in a group
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/newline-after-import.md
- 'import/newline-after-import': ['error'],
-
- // Require modules with a single export to use a default export (airbnb: error)
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/prefer-default-export.md
- 'import/prefer-default-export': ['off'],
-
- // Restrict which files can be imported in a given folder
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-restricted-paths.md
- 'import/no-restricted-paths': ['off'],
-
- // Forbid modules to have too many dependencies
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/max-dependencies.md
- 'import/max-dependencies': ['off', {max: 10}],
-
- // Forbid import of modules using absolute paths
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-absolute-path.md
- 'import/no-absolute-path': ['error'],
-
- // Forbid require() calls with expressions (airbnb: error)
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-dynamic-require.md
- 'import/no-dynamic-require': ['off'],
-
- // Use webpack default chunk names
- 'import/dynamic-import-chunkname': ['off'],
-
- // prevent importing the submodules of other modules
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-internal-modules.md
- 'import/no-internal-modules': [
- 'off',
- {
- allow: [],
- },
- ],
-
- // Warn if a module could be mistakenly parsed as a script by a consumer
- // leveraging Unambiguous JavaScript Grammar
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/unambiguous.md
- // this should not be enabled until this proposal has at least been *presented* to TC39.
- // At the moment, it"s not a thing.
- 'import/unambiguous': ['off'],
-
- // Forbid Webpack loader syntax in imports
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-webpack-loader-syntax.md
- 'import/no-webpack-loader-syntax': ['error'],
-
- // Prevent unassigned imports
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-unassigned-import.md
- // importing for side effects is perfectly acceptable, if you need side effects.
- 'import/no-unassigned-import': ['off'],
-
- // Prevent importing the default as if it were named
- // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-named-default.md
- 'import/no-named-default': ['error'],
-
- // Reports if a module"s default export is unnamed
- // https://github.com/benmosher/eslint-plugin-import/blob/d9b712ac7fd1fddc391f7b234827925c160d956f/docs/rules/no-anonymous-default-export.md
- 'import/no-anonymous-default-export': [
- 'error',
- {
- allowArray: false,
- allowArrowFunction: false,
- allowAnonymousClass: false,
- allowAnonymousFunction: false,
- allowCallExpression: true,
- allowLiteral: false,
- allowObject: false,
- },
- ],
-};
-
-const reactJestRules = {
- 'jest/no-disabled-tests': 'error',
-};
-
-const reactRules = {
- ...reactReactRules,
- ...reactImportRules,
- ...reactJestRules,
- /**
- * React hooks
- */
- 'react-hooks/exhaustive-deps': 'error',
- // Biome not yet enforcing all parts of this rule https://github.com/biomejs/biome/issues/1984
- 'react-hooks/rules-of-hooks': 'error',
-
- /**
- * Custom
- */
- // highlights literals in JSX components w/o translation tags
- 'getsentry/jsx-needs-il8n': ['off'],
- 'testing-library/render-result-naming-convention': 'off',
- 'testing-library/no-unnecessary-act': 'off',
-
- // Disabled as we have many tests which render as simple validations
- 'jest/expect-expect': 'off',
-
- // Disabled as we have some comment out tests that cannot be
- // uncommented due to typescript errors.
- 'jest/no-commented-out-tests': 'off',
-
- // Disabled as we do sometimes have conditional expects
- 'jest/no-conditional-expect': 'off',
-
- // Useful for exporting some test utilities
- 'jest/no-export': 'off',
-
- 'typescript-sort-keys/interface': [
- 'error',
- 'asc',
- {caseSensitive: true, natural: false, requiredFirst: true},
- ],
-};
-
-const appRules = {
- /**
- * emotion rules for v10
- *
- * This probably aren't as necessary anymore, but let's remove when we move to v11
- */
- '@emotion/jsx-import': 'off',
- '@emotion/no-vanilla': 'error',
- '@emotion/import-from-emotion': 'error',
- '@emotion/styled-import': 'error',
-
- // no-undef is redundant with typescript as tsc will complain
- // A downside is that we won't get eslint errors about it, but your editors should
- // support tsc errors so....
- // https://eslint.org/docs/rules/no-undef
- 'no-undef': 'off',
-
- // Let formatter handle this
- 'arrow-body-style': 'off',
-
- /**
- * Need to use typescript version of these rules
- * https://eslint.org/docs/rules/no-shadow
- */
- 'no-shadow': 'off',
- '@typescript-eslint/no-shadow': 'error',
-
- // This only override the `args` rule (which is "none"). There are too many errors and it's difficult to manually
- // fix them all, so we'll have to incrementally update.
- // https://eslint.org/docs/rules/no-unused-vars
- 'no-unused-vars': 'off',
- '@typescript-eslint/no-unused-vars': [
- 'error',
- {
- vars: 'all',
- args: 'all',
- // TODO(scttcper): We could enable this to enforce catch (error)
- // https://eslint.org/docs/latest/rules/no-unused-vars#caughterrors
- caughtErrors: 'none',
-
- // Ignore vars that start with an underscore
- // e.g. if you want to omit a property using object spread:
- //
- // const {name: _name, ...props} = this.props;
- //
- varsIgnorePattern: '^_',
- argsIgnorePattern: '^_',
- destructuredArrayIgnorePattern: '^_',
- },
- ],
-
- // https://eslint.org/docs/rules/no-use-before-define
- 'no-use-before-define': 'off',
- // This seems to have been turned on while previously it had been off
- '@typescript-eslint/no-use-before-define': ['off'],
-
- /**
- * Restricted imports, e.g. deprecated libraries, etc
- *
- * See: https://eslint.org/docs/rules/no-restricted-imports
- */
- 'no-restricted-imports': [
- 'error',
- {
- paths: [
- {
- name: 'enzyme',
- message:
- 'Please import from `sentry-test/enzyme` instead. See: https://github.com/getsentry/frontend-handbook#undefined-theme-properties-in-tests for more information',
- },
- {
- name: '@testing-library/react',
- message:
- 'Please import from `sentry-test/reactTestingLibrary` instead so that we can ensure consistency throughout the codebase',
- },
- {
- name: '@testing-library/react-hooks',
- message:
- 'Please import from `sentry-test/reactTestingLibrary` instead so that we can ensure consistency throughout the codebase',
- },
- {
- name: '@testing-library/user-event',
- message:
- 'Please import from `sentry-test/reactTestingLibrary` instead so that we can ensure consistency throughout the codebase',
- },
- {
- name: '@sentry/browser',
- message:
- 'Please import from `@sentry/react` to ensure consistency throughout the codebase.',
- },
- {
- name: 'marked',
- message:
- "Please import marked from 'app/utils/marked' so that we can ensure sanitation of marked output",
- },
-
- {
- name: 'lodash',
- message:
- "Please import lodash utilities individually. e.g. `import isEqual from 'lodash/isEqual';`. See https://github.com/getsentry/frontend-handbook#lodash from for information",
- },
- {
- name: 'lodash/get',
- message:
- 'Optional chaining `?.` and nullish coalescing operators `??` are available and preferred over using `lodash/get`. See https://github.com/getsentry/frontend-handbook#new-syntax for more information',
- },
- {
- name: 'react-bootstrap',
- message:
- 'Avoid usage of any react-bootstrap components as it will soon be removed',
- },
- {
- name: 'sentry/utils/theme',
- importNames: ['lightColors', 'darkColors'],
- message:
- "'lightColors' and 'darkColors' exports intended for use in Storybook only. Instead, use theme prop from emotion or the useTheme hook.",
- },
- {
- name: 'react-router',
- importNames: ['withRouter'],
- message:
- "Use 'useLocation', 'useParams', 'useNavigate', 'useRoutes' from sentry/utils instead.",
- },
- {
- name: 'sentry/utils/withSentryRouter',
- importNames: ['withSentryRouter'],
- message:
- "Use 'useLocation', 'useParams', 'useNavigate', 'useRoutes' from sentry/utils instead.",
- },
- ],
- },
- ],
-
- /**
- * Better import sorting
- */
- 'sort-imports': 'off',
- 'import/order': 'off',
- 'simple-import-sort/imports': [
- 'error',
- {
- groups: [
- // Side effect imports.
- ['^\\u0000'],
-
- // Node.js builtins.
- // biome-ignore lint/correctness/noNodejsModules: Need to get the list of things!
- [`^(${require('node:module').builtinModules.join('|')})(/|$)`],
-
- // Packages. `react` related packages come first.
- ['^react', '^@?\\w'],
-
- // Test should be separate from the app
- ['^(sentry-test|getsentry-test)(/.*|$)'],
-
- // Internal packages.
- ['^(sentry-locale|sentry-images)(/.*|$)'],
-
- ['^(getsentry-images)(/.*|$)'],
-
- ['^(app|sentry)(/.*|$)'],
-
- // Getsentry packages.
- ['^(admin|getsentry)(/.*|$)'],
-
- // Style imports.
- ['^.+\\.less$'],
-
- // Parent imports. Put `..` last.
- ['^\\.\\.(?!/?$)', '^\\.\\./?$'],
-
- // Other relative imports. Put same-folder imports and `.` last.
- ['^\\./(?=.*/)(?!/?$)', '^\\.(?!/?$)', '^\\./?$'],
- ],
- },
- ],
-
- 'sentry/no-digits-in-tn': ['error'],
-
- 'sentry/no-dynamic-translations': ['error'],
-
- // https://github.com/xojs/eslint-config-xo-typescript/blob/9791a067d6a119a21a4db72c02f1da95e25ffbb6/index.js#L95
- '@typescript-eslint/no-restricted-types': [
- 'error',
- {
- types: {
- // TODO(scttcper): Turn object on to make our types more strict
- // object: {
- // message: 'The `object` type is hard to use. Use `Record` instead. See: https://github.com/typescript-eslint/typescript-eslint/pull/848',
- // fixWith: 'Record'
- // },
- Buffer: {
- message:
- 'Use Uint8Array instead. See: https://sindresorhus.com/blog/goodbye-nodejs-buffer',
- suggest: ['Uint8Array'],
- },
- '[]': "Don't use the empty array type `[]`. It only allows empty arrays. Use `SomeType[]` instead.",
- '[[]]':
- "Don't use `[[]]`. It only allows an array with a single element which is an empty array. Use `SomeType[][]` instead.",
- '[[[]]]': "Don't use `[[[]]]`. Use `SomeType[][][]` instead.",
- },
- },
- ],
- // TODO(scttcper): Turn no-empty-object-type on to make our types more strict
- // '@typescript-eslint/no-empty-object-type': 'error',
- // TODO(scttcper): Turn no-function on to make our types more strict
- // '@typescript-eslint/no-unsafe-function-type': 'error',
- '@typescript-eslint/no-wrapper-object-types': 'error',
-
- // Naming convention enforcements
- '@typescript-eslint/naming-convention': [
- 'error',
- {
- selector: 'typeLike',
- format: ['PascalCase'],
- leadingUnderscore: 'allow',
- },
- {
- selector: 'enumMember',
- format: ['UPPER_CASE'],
- },
- ],
-
- // Don't allow lookbehind expressions in regexp as they crash safari
- // We've accidentally used lookbehinds a few times and caused problems.
- 'no-lookahead-lookbehind-regexp/no-lookahead-lookbehind-regexp': [
- 'error',
- 'no-lookbehind',
- 'no-negative-lookbehind',
- ],
-};
-
-const strictRules = {
- // https://eslint.org/docs/rules/no-console
- 'no-console': ['error'],
-
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-is-mounted.md
- 'react/no-is-mounted': ['error'],
-
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-find-dom-node.md
- // Recommended to use callback refs instead
- 'react/no-find-dom-node': ['error'],
-
- // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-string-refs.md
- // This is now considered legacy, callback refs preferred
- 'react/no-string-refs': ['error'],
-
- 'jest/no-large-snapshots': ['error', {maxSize: 2000}],
-
- 'sentry/no-styled-shortcut': ['error'],
-};
-
-const extendsList = [
- 'plugin:jest/recommended',
- 'plugin:jest-dom/recommended',
- 'plugin:import/typescript',
-];
-if (detectDeprecations) {
- extendsList.push('plugin:deprecation/recommended');
-}
-
-module.exports = {
- root: true,
- extends: extendsList,
-
- plugins: [
- 'jest-dom',
- 'testing-library',
- 'typescript-sort-keys',
- 'react-hooks',
- '@typescript-eslint',
- '@emotion',
- 'import',
- 'react',
- 'sentry',
- 'simple-import-sort',
- 'no-lookahead-lookbehind-regexp',
- ],
-
- parser: '@typescript-eslint/parser',
-
- parserOptions: detectDeprecations
- ? {
- warnOnUnsupportedTypeScriptVersion: false,
- ecmaVersion: 6,
- sourceType: 'module',
- ecmaFeatures: {
- jsx: true,
- modules: true,
- legacyDecorators: true,
- },
- project: './tsconfig.json',
- }
- : {
- warnOnUnsupportedTypeScriptVersion: false,
- ecmaVersion: 6,
- sourceType: 'module',
- ecmaFeatures: {
- jsx: true,
- modules: true,
- legacyDecorators: true,
- },
- },
-
- env: {
- browser: true,
- es6: true,
- jest: true,
- jquery: true, // hard-loaded into vendor.js
- },
-
- globals: {
- require: false,
- expect: false,
- MockApiClient: true,
- tick: true,
- jest: true,
- },
-
- settings: {
- react: {
- version: '17.0.2', // React version, can not `detect` because of getsentry
- },
- 'import/parsers': {
- '@typescript-eslint/parser': ['.ts', '.tsx'],
- },
- 'import/resolver': {
- typescript: {},
- },
- 'import/extensions': ['.js', '.jsx'],
- },
-
- rules: {
- ...baseRules,
- ...reactRules,
- ...appRules,
- ...strictRules,
- 'react-hooks/rules-of-hooks': 'error',
- 'react-hooks/exhaustive-deps': [
- 'error',
- {additionalHooks: '(useEffectAfterFirstRender|useMemoWithPrevious)'},
- ],
- 'no-restricted-imports': [
- 'error',
- {
- patterns: [
- {
- group: ['sentry/components/devtoolbar/*'],
- message: 'Do not depend on toolbar internals',
- },
- ],
- paths: [
- {
- name: '@testing-library/react',
- message:
- 'Please import from `sentry-test/reactTestingLibrary` instead so that we can ensure consistency throughout the codebase',
- },
- {
- name: '@testing-library/react-hooks',
- message:
- 'Please import from `sentry-test/reactTestingLibrary` instead so that we can ensure consistency throughout the codebase',
- },
- {
- name: '@testing-library/user-event',
- message:
- 'Please import from `sentry-test/reactTestingLibrary` instead so that we can ensure consistency throughout the codebase',
- },
- {
- name: '@sentry/browser',
- message:
- 'Please import from `@sentry/react` to ensure consistency throughout the codebase.',
- },
- {
- name: 'marked',
- message:
- "Please import marked from 'app/utils/marked' so that we can ensure sanitation of marked output",
- },
- {
- name: 'lodash',
- message:
- "Please import lodash utilities individually. e.g. `import isEqual from 'lodash/isEqual';`. See https://github.com/getsentry/frontend-handbook#lodash from for information",
- },
- {
- name: 'lodash/get',
- message:
- 'Optional chaining `?.` and nullish coalescing operators `??` are available and preferred over using `lodash/get`. See https://github.com/getsentry/frontend-handbook#new-syntax for more information',
- },
- {
- name: 'sentry/utils/theme',
- importNames: ['lightColors', 'darkColors'],
- message:
- "'lightColors' and 'darkColors' exports intended for use in Storybook only. Instead, use theme prop from emotion or the useTheme hook.",
- },
- {
- name: 'react-router',
- importNames: ['withRouter'],
- message:
- "Use 'useLocation', 'useParams', 'useNavigate', 'useRoutes' from sentry/utils instead.",
- },
- {
- name: 'sentry/utils/withSentryRouter',
- importNames: ['withSentryRouter'],
- message:
- "Use 'useLocation', 'useParams', 'useNavigate', 'useRoutes' from sentry/utils instead.",
- },
- {
- name: 'qs',
- message: 'Please use query-string instead of qs',
- },
- {
- name: 'moment',
- message: 'Please import moment-timezone instead of moment',
- },
- ],
- },
- ],
-
- // TODO(@anonrig): Remove this from eslint-sentry-config
- 'space-infix-ops': 'off',
- 'object-shorthand': 'off',
- 'object-curly-spacing': 'off',
- 'import/no-amd': 'off',
- 'no-danger-with-children': 'off',
- 'no-fallthrough': 'off',
- 'no-obj-calls': 'off',
- 'array-bracket-spacing': 'off',
- 'computed-property-spacing': 'off',
- 'react/no-danger-with-children': 'off',
- 'jest/no-disabled-tests': 'off',
- },
- // JSON file formatting is handled by Biome. ESLint should not be linting
- // and formatting these files.
- ignorePatterns: ['*.json'],
- overrides: [
- {
- files: ['static/app/components/devtoolbar/**/*.{ts,tsx}'],
- rules: {
- 'no-restricted-imports': [
- 'error',
- {
- paths: [
- {
- name: 'sentry/utils/queryClient',
- message:
- 'Import from `@tanstack/react-query` and `./hooks/useFetchApiData` or `./hooks/useFetchInfiniteApiData` instead.',
- },
- ],
- },
- ],
- },
- },
- {
- files: ['static/**/*.spec.{ts,js}', 'tests/js/**/*.{ts,js}'],
- extends: ['plugin:testing-library/react', ...extendsList],
- rules: {
- ...baseRules,
- ...reactRules,
- ...appRules,
- ...strictRules,
- // TODO(@anonrig): Remove this from eslint-sentry-config
- 'space-infix-ops': 'off',
- 'object-shorthand': 'off',
- 'object-curly-spacing': 'off',
- 'import/no-amd': 'off',
- 'no-danger-with-children': 'off',
- 'no-fallthrough': 'off',
- 'no-obj-calls': 'off',
- 'array-bracket-spacing': 'off',
- 'computed-property-spacing': 'off',
- 'react/no-danger-with-children': 'off',
- 'jest/no-disabled-tests': 'off',
- },
- },
- {
- // We specify rules explicitly for the sdk-loader here so we do not have
- // eslint ignore comments included in the source file, which is consumed
- // by users.
- files: ['**/js-sdk-loader.ts'],
- rules: {
- 'no-console': 'off',
- },
- },
- ],
-};
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 9695c88749836f..098974ecd3aa4a 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -122,6 +122,7 @@ pyproject.toml @getsentry/owners-pytho
babel.config.* @getsentry/owners-js-build
biome.json @getsentry/owners-js-build
build-utils/ @getsentry/owners-js-build
+eslint.config.mjs @getsentry/owners-js-build
jest.config.ts @getsentry/owners-js-build
tsconfig.* @getsentry/owners-js-build
webpack.config.* @getsentry/owners-js-build
@@ -348,12 +349,23 @@ tests/sentry/api/endpoints/test_organization_dashboard_widget_details.py @ge
## DevToolbar
-/src/sentry/templates/sentry/toolbar/ @getsentry/replay
-/src/sentry/toolbar/ @getsentry/replay
-/tests/sentry/toolbar/ @getsentry/replay
+/src/sentry/templates/sentry/toolbar/ @getsentry/replay
+/src/sentry/toolbar/ @getsentry/replay
+/tests/sentry/toolbar/ @getsentry/replay
+/static/app/components/devtoolbar/ @getsentry/replay-frontend
+/src/sentry/middleware/devtoolbar.py @getsentry/replay-backend
+/tests/sentry/middleware/test_devtoolbar.py @getsentry/replay-backend
## End of DevToolbar
+## Frontend
+/static/app/components/analyticsArea.spec.tsx @getsentry/app-frontend
+/static/app/components/analyticsArea.tsx @getsentry/app-frontend
+/static/app/components/events/interfaces/ @getsentry/app-frontend
+/static/app/components/forms/ @getsentry/app-frontend
+## End of Frontend
+
+
## Integrations
/src/sentry/sentry_apps/ @getsentry/product-owners-settings-integrations @getsentry/ecosystem
/tests/sentry/sentry_apps @getsentry/product-owners-settings-integrations @getsentry/ecosystem
@@ -459,13 +471,10 @@ tests/sentry/api/endpoints/test_organization_dashboard_widget_details.py @ge
## Telemetry Experience
-/src/sentry/api/endpoints/organization_ddm.py @getsentry/telemetry-experience
-/tests/sentry/api/endpoints/test_organization_ddm_meta.py @getsentry/telemetry-experience
/src/sentry/api/endpoints/organization_metric* @getsentry/telemetry-experience
/tests/sentry/api/endpoints/test_organization_metric* @getsentry/telemetry-experience
/src/sentry/api/endpoints/organization_sessions.py @getsentry/telemetry-experience
/tests/snuba/api/endpoints/test_organization_sessions.py @getsentry/telemetry-experience
-/src/sentry/api/endpoints/projects_metrics.py @getsentry/telemetry-experience
/tests/sentry/api/endpoints/test_projects_metrics_visibility.py @getsentry/telemetry-experience
/src/sentry/api/endpoints/organization_onboarding* @getsentry/telemetry-experience
/tests/sentry/api/endpoints/test_organization_onboarding* @getsentry/telemetry-experience
@@ -481,11 +490,8 @@ tests/sentry/api/endpoints/test_organization_dashboard_widget_details.py @ge
/src/sentry/sentry_metrics/visibility/ @getsentry/telemetry-experience
/tests/sentry/sentry_metrics/visibility/ @getsentry/telemetry-experience
/src/sentry/sentry_metrics/extraction_rules.py @getsentry/telemetry-experience
-/tests/sentry/sentry_metrics/test_extraction_rules.py @getsentry/telemetry-experience
/src/sentry/snuba/metrics/ @getsentry/telemetry-experience
/tests/sentry/snuba/metrics/ @getsentry/telemetry-experience
-/src/sentry/relay/config/metric_extraction.py @getsentry/telemetry-experience
-/tests/sentry/relay/config/test_metric_extraction.py @getsentry/telemetry-experience
/static/app/actionCreators/metrics.spec.tsx @getsentry/telemetry-experience
/static/app/actionCreators/metrics.tsx @getsentry/telemetry-experience
@@ -497,8 +503,6 @@ tests/sentry/api/endpoints/test_organization_dashboard_widget_details.py @ge
/static/app/types/project.tsx @getsentry/telemetry-experience
/static/app/utils/metrics/ @getsentry/telemetry-experience
/static/app/views/metrics/ @getsentry/telemetry-experience
-/static/app/views/performance/landing/dynamicSamplingMetricsAccuracy.spec.tsx @getsentry/telemetry-experience
-/static/app/views/performance/landing/dynamicSamplingMetricsAccuracyAlert.tsx @getsentry/telemetry-experience
/static/app/views/settings/project/dynamicSampling/ @getsentry/telemetry-experience
/static/app/views/settings/dynamicSampling/ @getsentry/telemetry-experience
/static/app/views/settings/projectMetrics/* @getsentry/telemetry-experience
@@ -631,3 +635,7 @@ tests/sentry/api/endpoints/test_organization_dashboard_widget_details.py @ge
# Taskworkers
/src/sentry/taskworker/ @getsentry/taskworker
/tests/sentry/taskworker/ @getsentry/taskworker
+
+# Tempest
+/src/sentry/tempest/ @getsentry/gdx
+/tests/sentry/tempest/ @getsentry/gdx
diff --git a/.github/workflows/frontend-lint-burndown.yml b/.github/workflows/frontend-lint-burndown.yml
index 21279bc7ed19cb..e9f99589e8db9b 100644
--- a/.github/workflows/frontend-lint-burndown.yml
+++ b/.github/workflows/frontend-lint-burndown.yml
@@ -17,9 +17,9 @@ jobs:
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- - name: Install dependencies & inject eslint-plugin-deprecation
+ - name: Install dependencies
id: dependencies
- run: yarn add --dev eslint-plugin-deprecation
+ run: yarn install
# Setup custom tsc matcher, see https://github.com/actions/setup-node/issues/97
- name: setup matchers
diff --git a/.github/workflows/shuffle-tests.yml b/.github/workflows/shuffle-tests.yml
index 43be8dbf5ff293..4b5013a7310045 100644
--- a/.github/workflows/shuffle-tests.yml
+++ b/.github/workflows/shuffle-tests.yml
@@ -28,18 +28,22 @@ jobs:
name: run backend tests
runs-on: ubuntu-24.04
timeout-minutes: 90
+ permissions:
+ contents: read
+ id-token: write
strategy:
# This helps not having to run multiple jobs because one fails, thus, reducing resource usage
# and reducing the risk that one of many runs would turn red again (read: intermittent tests)
fail-fast: false
matrix:
# XXX: When updating this, make sure you also update MATRIX_INSTANCE_TOTAL.
- instance: [0, 1, 2, 3, 4, 5, 6]
+ instance: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
pg-version: ['14']
env:
- # XXX: MATRIX_INSTANCE_TOTAL must be hardcoded to the length of strategy.matrix.instance.
- MATRIX_INSTANCE_TOTAL: 7
+ # XXX: `MATRIX_INSTANCE_TOTAL` must be hardcoded to the length of `strategy.matrix.instance`.
+ # If this increases, make sure to also increase `flags.backend.after_n_builds` in `codecov.yml`.
+ MATRIX_INSTANCE_TOTAL: 11
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
@@ -48,7 +52,10 @@ jobs:
uses: ./.github/actions/setup-sentry
id: setup
with:
+ redis_cluster: true
+ kafka: true
snuba: true
+ symbolicator: true
# Right now, we run so few bigtable related tests that the
# overhead of running bigtable in all backend tests
# is way smaller than the time it would take to run in its own job.
diff --git a/.github/workflows/test_docker_compose_acceptance.yml b/.github/workflows/test_devservices_acceptance.yml
similarity index 87%
rename from .github/workflows/test_docker_compose_acceptance.yml
rename to .github/workflows/test_devservices_acceptance.yml
index aa81794eb085ba..279c41114d277c 100644
--- a/.github/workflows/test_docker_compose_acceptance.yml
+++ b/.github/workflows/test_devservices_acceptance.yml
@@ -1,10 +1,10 @@
# Also note that this name *MUST* match the filename because GHA
# only provides the workflow name (https://docs.github.com/en/free-pro-team@latest/actions/reference/environment-variables#default-environment-variables)
# and GH APIs only support querying by workflow *FILENAME* (https://developer.github.com/v3/actions/workflows/#get-a-workflow)
-name: test-docker-compose-acceptance
+name: test-devservices-acceptance
on:
schedule:
- - cron: '0 0 * * *'
+ - cron: '0 * * * *'
# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
@@ -16,10 +16,13 @@ concurrency:
env:
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 3
NODE_OPTIONS: '--max-old-space-size=4096'
+ USE_NEW_DEVSERVICES: 1
+ IS_DEV: 1
+ CHARTCUTERIE_CONFIG_PATH: ${{ github.workspace }}/config/chartcuterie
jobs:
- docker-compose-acceptance:
- name: docker-compose-acceptance
+ devservices-acceptance:
+ name: devservices-acceptance
runs-on: ubuntu-24.04
timeout-minutes: 30
permissions:
@@ -89,15 +92,9 @@ jobs:
uses: ./.github/actions/test-setup-sentry-devservices
id: setup
- - name: copy chartcuterie config to devservices chartcuterie directory
- run: |
- ls config/chartcuterie
- cp -r config/chartcuterie devservices
-
- name: Bring up devservices
run: |
- docker network create sentry
- docker compose -f devservices/docker-compose-testing.yml up -d redis postgres snuba clickhouse chartcuterie
+ devservices up --mode acceptance-ci
- name: Run acceptance tests (#${{ steps.setup.outputs.matrix-instance-number }} of ${{ steps.setup.outputs.matrix-instance-total }})
run: make run-acceptance
@@ -127,14 +124,13 @@ jobs:
- name: Inspect failure
if: failure()
run: |
- docker compose -f devservices/docker-compose-testing.yml ps
- docker compose -f devservices/docker-compose-testing.yml logs --tail 1000
+ devservices logs
- docker-compose-acceptance-required-checks:
+ devservices-acceptance-required-checks:
# this is a required check so we need this job to always run and report a status.
if: always()
- name: Docker Compose Acceptance
- needs: [docker-compose-acceptance]
+ name: Devservices Acceptance
+ needs: [devservices-acceptance]
runs-on: ubuntu-24.04
timeout-minutes: 3
steps:
diff --git a/.github/workflows/test_docker_compose_backend.yml b/.github/workflows/test_devservices_backend.yml
similarity index 75%
rename from .github/workflows/test_docker_compose_backend.yml
rename to .github/workflows/test_devservices_backend.yml
index 179b1efee17362..ec3f0ae5645eab 100644
--- a/.github/workflows/test_docker_compose_backend.yml
+++ b/.github/workflows/test_devservices_backend.yml
@@ -1,9 +1,8 @@
-name: test-docker-compose-backend
+name: test-devservices-backend
on:
schedule:
- - cron: '0 0 * * *'
-
+ - cron: '0 * * * *'
# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
@@ -13,9 +12,11 @@ concurrency:
# hack for https://github.com/actions/cache/issues/810#issuecomment-1222550359
env:
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 3
+ USE_NEW_DEVSERVICES: 1
+ IS_DEV: 1
jobs:
- docker-compose-api-docs:
+ devservices-api-docs:
name: api docs test
runs-on: ubuntu-24.04
steps:
@@ -31,9 +32,7 @@ jobs:
id: setup
- name: Bring up devservices
- run: |
- docker network create sentry
- docker compose -f devservices/docker-compose-testing.yml up -d redis postgres snuba clickhouse
+ run: devservices up
- name: Run API docs tests
# install ts-node for ts build scripts to execute properly without potentially installing
@@ -44,11 +43,9 @@ jobs:
- name: Inspect failure
if: failure()
- run: |
- docker compose -f devservices/docker-compose-testing.yml ps
- docker compose -f devservices/docker-compose-testing.yml logs --tail 1000
+ run: devservices logs
- docker-compose-backend-test:
+ devservices-backend-test:
name: backend test
runs-on: ubuntu-24.04
timeout-minutes: 60
@@ -73,13 +70,11 @@ jobs:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Setup sentry env
+ id: setup
uses: ./.github/actions/test-setup-sentry-devservices
- name: Bring up devservices
- run: |
- docker network create sentry
- echo "BIGTABLE_EMULATOR_HOST=127.0.0.1:8086" >> $GITHUB_ENV
- docker compose -f devservices/docker-compose-testing.yml up -d
+ run: devservices up --mode backend-ci
- name: Run backend test (${{ steps.setup.outputs.matrix-instance-number }} of ${{ steps.setup.outputs.matrix-instance-total }})
run: |
@@ -107,11 +102,9 @@ jobs:
- name: Inspect failure
if: failure()
- run: |
- docker compose -f devservices/docker-compose-testing.yml ps
- docker compose -f devservices/docker-compose-testing.yml logs --tail 1000
+ run: devservices logs
- docker-compose-backend-migration-tests:
+ devservices-backend-migration-tests:
name: backend migration tests
runs-on: ubuntu-24.04
timeout-minutes: 30
@@ -127,9 +120,7 @@ jobs:
id: setup
- name: Bring up devservices
- run: |
- docker network create sentry
- docker compose -f devservices/docker-compose-testing.yml up -d redis postgres snuba clickhouse
+ run: devservices up
- name: run tests
run: |
@@ -146,11 +137,9 @@ jobs:
- name: Inspect failure
if: failure()
- run: |
- docker compose -f devservices/docker-compose-testing.yml ps
- docker compose -f devservices/docker-compose-testing.yml logs --tail 1000
+ run: devservices logs
- docker-compose-cli:
+ devservices-cli:
name: cli test
runs-on: ubuntu-24.04
timeout-minutes: 10
@@ -165,9 +154,7 @@ jobs:
id: setup
- name: Bring up devservices
- run: |
- docker network create sentry
- docker compose -f devservices/docker-compose-testing.yml up -d redis postgres
+ run: devservices up --mode migrations
- name: Run test
run: |
@@ -184,11 +171,9 @@ jobs:
- name: Inspect failure
if: failure()
- run: |
- docker compose -f devservices/docker-compose-testing.yml ps
- docker compose -f devservices/docker-compose-testing.yml logs --tail 1000
+ run: devservices logs
- docker-compose-migration:
+ devservices-migration:
name: check migration
runs-on: ubuntu-24.04
strategy:
@@ -204,9 +189,7 @@ jobs:
id: setup
- name: Bring up devservices
- run: |
- docker network create sentry
- docker compose -f devservices/docker-compose-testing.yml up -d redis postgres
+ run: devservices up --mode migrations
- name: Migration & lockfile checks
env:
@@ -217,11 +200,9 @@ jobs:
- name: Inspect failure
if: failure()
- run: |
- docker compose -f devservices/docker-compose-testing.yml ps
- docker compose -f devservices/docker-compose-testing.yml logs --tail 1000
+ run: devservices logs
- docker-compose-monolith-dbs:
+ devservices-monolith-dbs:
name: monolith-dbs test
runs-on: ubuntu-24.04
timeout-minutes: 20
@@ -236,9 +217,7 @@ jobs:
id: setup
- name: Bring up devservices
- run: |
- docker network create sentry
- docker compose -f devservices/docker-compose-testing.yml up -d redis postgres
+ run: devservices up --mode migrations
- name: Run test
run: |
@@ -265,24 +244,22 @@ jobs:
- name: Inspect failure
if: failure()
- run: |
- docker compose -f devservices/docker-compose-testing.yml ps
- docker compose -f devservices/docker-compose-testing.yml logs --tail 1000
+ run: devservices logs
# This check runs once all dependent jobs have passed
# It symbolizes that all required Backend checks have succesfully passed (Or skipped)
# This step is the only required backend check
- docker-compose-backend-required-check:
+ devservices-backend-required-check:
needs:
[
- docker-compose-api-docs,
- docker-compose-backend-test,
- docker-compose-backend-migration-tests,
- docker-compose-cli,
- docker-compose-migration,
- docker-compose-monolith-dbs,
+ devservices-api-docs,
+ devservices-backend-test,
+ devservices-backend-migration-tests,
+ devservices-cli,
+ devservices-migration,
+ devservices-monolith-dbs,
]
- name: Docker Compose Backend
+ name: Devservices Backend
# This is necessary since a failed/skipped dependent job would cause this job to be skipped
if: always()
runs-on: ubuntu-24.04
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index bd07e25b9a6872..4a6418b9c0f0f1 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -117,7 +117,7 @@ repos:
- id: eslint
name: eslint
language: system
- files: \.[jt]sx?$
+ files: \.(ts|js|tsx|jsx|mjs)$
entry: ./node_modules/.bin/eslint --quiet --fix
- id: stylelint
diff --git a/CHANGES b/CHANGES
index 84afe672222eeb..0655a554ced755 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,3 +1,99 @@
+24.12.1
+-------
+
+### Various fixes & improvements
+
+- fix: fixes KeyError when running with stale topic dlq (#82512) by @lynnagara
+- chore(issue-views): Add analytics back to tab actions (#82504) by @MichaelSun48
+- chore(sentry apps): Introduce new error types for sentry apps (#82507) by @Christinarlong
+- fix timezone normalization (#82496) by @kneeyo1
+- ref(tsc): convert teamAccessRequestModal to FC (#82470) by @michellewzhang
+- ref(tsc): convert dashboardWidgetQuerySelectorModal to FC (#82466) by @michellewzhang
+- ref(issue-views): Overhaul issue views state and logic to a new context (#82429) by @MichaelSun48
+- ref: strptime -> fromisoformat in tests (#82488) by @asottile-sentry
+- chore(various): Fix linter warnings (#82494) by @lobsterkatie
+- ref(insights): Split out `getAxisMaxForPercentageSeries` (#82493) by @gggritso
+- fix(ecosystem): Track metrics for issue detail ticket creation (#82436) by @GabeVillalobos
+- ref(aci): pass WorkflowJob into process_workflows (#82489) by @cathteng
+- fix(group-events): Fix typo and error text (#82490) by @leeandher
+- fix(web): Add react_config context on auth pages take 2 (#82480) by @BYK
+- feat(alerts): ACI dual write alert rule helpers (#82400) by @ceorourke
+- feat(dashboards): Pass `LineChart` series meta alongside the data (#82047) by @gggritso
+- fix(eap): Numeric attribute filtering in snql eap (#82472) by @Zylphrex
+- chore(issues): Opt in a few more endpoint tests to stronger types (#82382) by @mrduncan
+- ref: remove calls to iso_format in testutils (#82461) by @asottile-sentry
+- feat(dashboards): enable sorting by column in table view (#82239) by @harshithadurai
+- ref(workflow_engine): remove remaining references to condition in validators (#82438) by @mifu67
+- fix(flags): separate permission class (#82463) by @oioki
+- feat(new-trace): Fixing scroll on trace drawer (#82475) by @Abdkhan14
+- support routing stale messages to lowpri topic (#82322) by @lynnagara
+
+_Plus 240 more_
+
+24.12.0
+-------
+
+### Various fixes & improvements
+
+- chore(utils): allow duplicate values in registry by making reverse lookup optional (#82114) by @cathteng
+- feat(workflow_engine): Add `process_data_packets` method (#82002) by @saponifi3d
+- ref(workflow_engine): Remove DetectorType (#82111) by @saponifi3d
+- chore(aci milestone 3): move aggregation value helpers to incidents directory (#82103) by @mifu67
+- ref(feedback): remove spam detection logs - replaced by redash (#82071) by @aliu39
+- fix(activity): Update activity message for linked issues (#82064) by @snigdhas
+- ref(replay): improve error messages for invalid searches that raise CouldNotParseValue (#82048) by @aliu39
+- fix(toolbar): Include credentials with fetch requests (#82108) by @ryan953
+- feat(autofix): Add support for streamed output (#82024) by @roaga
+- :sparkles: feat(discord): add button to redirect for user linking (#82104) by @iamrajjoshi
+- feat(workflow_engine): Adding support for `process_workflows` in the IssuePlatform (#81975) by @saponifi3d
+- ref: remove skip_for_relay_store (#82106) by @asottile-sentry
+- feat(metric-issues): Configure workflow notifications by group type (#81609) by @snigdhas
+- fix(iphone-codes): update frontend definitions (#82100) by @armcknight
+- ref: improve grouphash_metadata test (#82101) by @asottile-sentry
+- fix(iphone-codes): update BE mapping; remove unused method (#82094) by @armcknight
+- ref: remove xfail_if_not_postgres (#82097) by @asottile-sentry
+- ref: fix typing for endpoints.project_rule_preview (#82089) by @asottile-sentry
+- ref: fix types for test_event_attachment_details (#82091) by @asottile-sentry
+- ref: remove requires_not_arm64 (#82093) by @asottile-sentry
+- chore(aci): enforce config schema without subclassing (#81979) by @cathteng
+- ref: fix types for eventstore.test_base (#82092) by @asottile-sentry
+- chore(stacktrace): Make source map tooltip aligned (#82016) by @MichaelSun48
+- ref: delete unused GroupEnvironmentWithStatsSerializer (#82090) by @asottile-sentry
+
+_Plus 93 more_
+
+24.11.2
+-------
+
+### Various fixes & improvements
+
+- fix(dashboards): Abbreviate `LineChartWidget` Y axis integers (#81937) by @gggritso
+- Revert "chore(profiling): remove profiling.stack_trace_rules.allowed_project_ids option (#81903)" (d0bea1aa) by @getsentry-bot
+- feat(widget-builder): Add limit field to widget builder hook (#81944) by @nikkikapadia
+- fix(alerts): Fix EAP alert filter bar to behave more like explore (#81946) by @edwardgou-sentry
+- feat(alerts): Renames eap metrics in ui to spans (#81917) by @edwardgou-sentry
+- feat(alerts): Limits eap alert time windows and periods (#81916) by @edwardgou-sentry
+- chore(insights): Remove bundle analysis UI flag (#81932) by @gggritso
+- feat(widget-builder): Batch URL param changes (#81923) by @narsaynorath
+- fix(merged): Always show a link for latests event of a merged group (#81947) by @leeandher
+- fix(dashboards): Add missing propagated props in `LineChartWidget` (#81935) by @gggritso
+- chore(profiling): remove profiling.stack_trace_rules.allowed_project_ids option (#81903) by @viglia
+- feat(new-trace): remove prefix related (#81918) by @doralchan
+- :mag: nit(integration slo): cleanup tests (#81943) by @iamrajjoshi
+- ref(widget-builder): Split out tests (#81949) by @narsaynorath
+- fix(trace-view): Web Vitals scores (#81945) by @0Calories
+- ref(insights): Simplify `SpanTimeCharts` (#81931) by @gggritso
+- fix(oauth): only remove the related tokens (#81677) by @sentaur-athena
+- :wrench: chore(integration slo): cleaning up tests and use util method (#81936) by @iamrajjoshi
+- feat(issue summary): Change 3-dot menu to dropdown (#81928) by @roaga
+- feat(sdk): Upgrade @sentry SDKs to v8.43.0 (#81925) by @aliu39
+- Better logging for backpressure (#81648) by @kneeyo1
+- feat(dashboards): add success message when favoriting dashboards (#81887) by @harshithadurai
+- feat(ui): Add dark app loading theme (#81611) by @scttcper
+- ref(dashboards): Export Widget component props (#81924) by @gggritso
+
+_Plus 442 more_
+
24.11.1
-------
diff --git a/api-docs/openapi.json b/api-docs/openapi.json
index f7cba5f6d5345f..a40ec189185cb3 100644
--- a/api-docs/openapi.json
+++ b/api-docs/openapi.json
@@ -15,10 +15,14 @@
},
"servers": [
{
- "url": "https://us.sentry.io/"
- },
- {
- "url": "https://de.sentry.io/"
+ "url": "https://{region}.sentry.io",
+ "variables": {
+ "region": {
+ "default": "us",
+ "description": "The data-storage-location for an organization",
+ "enum": ["us", "de"]
+ }
+ }
}
],
"tags": [
diff --git a/api-docs/paths/events/issue-details.json b/api-docs/paths/events/issue-details.json
index ac00d75f3c5fcb..15cdfb55ec7a70 100644
--- a/api-docs/paths/events/issue-details.json
+++ b/api-docs/paths/events/issue-details.json
@@ -216,7 +216,7 @@
},
"statusDetails": {
"type": "object",
- "description": "Additional details about the status of the issue.",
+ "description": "Additional details about the resolution. Supported values are `\"inRelease\"`, `\"inNextRelease\"`, `\"inCommit\"`, `\"ignoreDuration\"`, `\"ignoreCount\"`, `\"ignoreWindow\"`, `\"ignoreUserCount\"`, and `\"ignoreUserWindow\"`.",
"properties": {
"inNextRelease": {
"type": "boolean",
diff --git a/api-docs/paths/projects/dsyms.json b/api-docs/paths/projects/dsyms.json
index 8221656d080a20..911b76e55585a4 100644
--- a/api-docs/paths/projects/dsyms.json
+++ b/api-docs/paths/projects/dsyms.json
@@ -45,7 +45,7 @@
},
"post": {
"tags": ["Projects"],
- "description": "Upload a new debug information file for the given release.\n\nUnlike other API requests, files must be uploaded using the\ntraditional multipart/form-data content-type.\n\nThe file uploaded is a zip archive of an Apple .dSYM folder which\ncontains the individual debug images. Uploading through this endpoint\nwill create different files for the contained images.",
+ "description": "Upload a new debug information file for the given release.\n\nUnlike other API requests, files must be uploaded using the\ntraditional multipart/form-data content-type.\n\nRequests to this endpoint should use the region-specific domain e.g. `us.sentry.io` or `de.sentry.io`.\n\nThe file uploaded is a zip archive of an Apple .dSYM folder which\ncontains the individual debug images. Uploading through this endpoint\nwill create different files for the contained images.",
"operationId": "Upload a New File",
"parameters": [
{
@@ -109,6 +109,9 @@
{
"auth_token": ["project:write"]
}
+ ],
+ "servers": [
+ {"url": "https://{region}.sentry.io"}
]
},
"delete": {
diff --git a/api-docs/paths/releases/project-release-files.json b/api-docs/paths/releases/project-release-files.json
index cfacc62efd3777..695e414037c0bc 100644
--- a/api-docs/paths/releases/project-release-files.json
+++ b/api-docs/paths/releases/project-release-files.json
@@ -74,7 +74,7 @@
},
"post": {
"tags": ["Releases"],
- "description": "Upload a new project release file.",
+ "description": "Upload a new file for the given release.\n\nUnlike other API requests, files must be uploaded using the traditional multipart/form-data content-type.\n\nRequests to this endpoint should use the region-specific domain e.g. `us.sentry.io` or `de.sentry.io`.\n\nThe optional 'name' attribute should reflect the absolute path that this file will be referenced as. For example, in the case of JavaScript you might specify the full web URI.",
"operationId": "Upload a New Project Release File",
"parameters": [
{
@@ -171,6 +171,9 @@
{
"auth_token": ["project:releases"]
}
+ ],
+ "servers": [
+ {"url": "https://{region}.sentry.io"}
]
}
}
diff --git a/api-docs/paths/releases/release-files.json b/api-docs/paths/releases/release-files.json
index ab1adf49ce89e6..918e26c50e2365 100644
--- a/api-docs/paths/releases/release-files.json
+++ b/api-docs/paths/releases/release-files.json
@@ -65,7 +65,7 @@
},
"post": {
"tags": ["Releases"],
- "description": "Upload a new organization release file.",
+ "description": "Upload a new file for the given release.\n\nUnlike other API requests, files must be uploaded using the traditional multipart/form-data content-type.\n\nRequests to this endpoint should use the region-specific domain e.g. `us.sentry.io` or `de.sentry.io`.\n\nThe optional 'name' attribute should reflect the absolute path that this file will be referenced as. For example, in the case of JavaScript you might specify the full web URI.",
"operationId": "Upload a New Organization Release File",
"parameters": [
{
@@ -135,6 +135,9 @@
{
"auth_token": ["project:releases"]
}
+ ],
+ "servers": [
+ {"url": "https://{region}.sentry.io"}
]
}
}
diff --git a/bin/benchmark_codeowners/benchmark b/bin/benchmark_codeowners/benchmark
index 4c71d12e583d0a..78d6b611f9498a 100755
--- a/bin/benchmark_codeowners/benchmark
+++ b/bin/benchmark_codeowners/benchmark
@@ -1,5 +1,7 @@
#!/usr/bin/env python
# isort: skip_file
+# flake8: noqa: S002
+
"""
This script benchmarks the performance of issue owner assignment in Sentry.
@@ -15,7 +17,13 @@ import time
from sentry.models.organization import Organization
from sentry.models.projectownership import ProjectOwnership
from sentry.models.project import Project
+from sentry.models.team import Team
from sentry.utils import json
+import sentry_sdk
+from sentry.models.projectteam import ProjectTeam
+
+# disable sentry as it creates lots of noise in the output
+sentry_sdk.init(None)
def main(code_mapping_file, event_data_file):
@@ -42,6 +50,30 @@ def main(code_mapping_file, event_data_file):
name=project_name, slug=project_slug, id=project_id, organization_id=org.id
)
+ # create teams for all actors
+ teams_to_create = []
+ seen_teams = set()
+ for rule in code_mapping["rules"]:
+ for owner in rule["owners"]:
+ team_name = owner["identifier"]
+ if team_name not in seen_teams:
+ teams_to_create.append(
+ Team(
+ name=team_name,
+ slug=team_name,
+ organization_id=org.id,
+ id=owner["id"],
+ )
+ )
+ seen_teams.add(team_name)
+
+ # delete teams from previous runs
+ Team.objects.filter(id__in=[team.id for team in teams_to_create]).delete()
+
+ Team.objects.bulk_create(teams_to_create)
+ for team in Team.objects.filter(organization_id=org.id):
+ ProjectTeam.objects.create(project_id=project.id, team_id=team.id)
+
# create a projectownership
ProjectOwnership.objects.get_or_create(
project_id=project.id,
@@ -51,9 +83,18 @@ def main(code_mapping_file, event_data_file):
event_data = get_event_data()
start = time.time()
- ProjectOwnership.get_issue_owners(project.id, event_data)
+ issue_owners = ProjectOwnership.get_issue_owners(project.id, event_data)
elapsed_time = time.time() - start
print(f"Time taken: {elapsed_time:.6f} seconds") # noqa
+ print("Ownership rules:")
+ for rule, teams, rule_type in issue_owners:
+ print(f"\nRule:")
+ print(f" Type: {rule_type}")
+ print(f" Pattern: {rule.matcher.pattern}")
+ print(" Teams:")
+ for team in teams: # type: ignore[assignment]
+ if isinstance(team, Team): # Only handle Team objects
+ print(f" - {team.name} (id: {team.id})")
if __name__ == "__main__":
diff --git a/biome.json b/biome.json
index e18ffc69671ea7..24fd4555ce47d2 100644
--- a/biome.json
+++ b/biome.json
@@ -65,7 +65,6 @@
"noMisrefactoredShorthandAssign": "error",
"useAwait": "error",
"useNamespaceKeyword": "error",
- "noSkippedTests": "error",
"noFocusedTests": "error",
"noDuplicateTestHooks": "error"
},
diff --git a/build-utils/sentry-instrumentation.ts b/build-utils/sentry-instrumentation.ts
index c6797c1e22f490..ae502acd76f044 100644
--- a/build-utils/sentry-instrumentation.ts
+++ b/build-utils/sentry-instrumentation.ts
@@ -1,6 +1,6 @@
/* eslint-env node */
+import type {Span} from '@sentry/core';
import type * as Sentry from '@sentry/node';
-import type {Span} from '@sentry/types';
import crypto from 'node:crypto';
import https from 'node:https';
import os from 'node:os';
@@ -71,7 +71,7 @@ class SentryInstrumentation {
sentry.setTag('arch', os.arch());
sentry.setTag(
'cpu',
- cpus?.length ? `${cpus[0].model} (cores: ${cpus.length})}` : 'N/A'
+ cpus?.length ? `${cpus[0]!.model} (cores: ${cpus.length})}` : 'N/A'
);
this.Sentry = sentry;
@@ -96,7 +96,7 @@ class SentryInstrumentation {
.filter(assetName => !assetName.endsWith('.map'))
.forEach(assetName => {
const asset = compilation.assets[assetName];
- const size = asset.size();
+ const size = asset!.size();
const file = assetName;
const body = JSON.stringify({
file,
diff --git a/codecov.yml b/codecov.yml
index 1a981e84c7c040..4b8b260fe743f9 100644
--- a/codecov.yml
+++ b/codecov.yml
@@ -25,7 +25,6 @@ coverage:
- static/app/routes.tsx
- static/app/**/*.stories.tsx
- static/app/**/__stories__/
- - tests/
component_management:
individual_components:
@@ -54,7 +53,7 @@ flags:
after_n_builds: 4
backend:
paths:
- - 'src/sentry/**/*.py'
+ - '**/*.py'
carryforward: true
# Do not send any status checks until n coverage reports are uploaded.
# NOTE: If you change this, make sure to change `comment.after_n_builds` below as well.
diff --git a/config/tsconfig.base.json b/config/tsconfig.base.json
index 326841c8574a4a..e837746b45bb24 100644
--- a/config/tsconfig.base.json
+++ b/config/tsconfig.base.json
@@ -40,6 +40,7 @@
"noImplicitThis": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
+ "noUncheckedIndexedAccess": true,
"strict": true,
"strictBindCallApply": false,
"useUnknownInCatchVariables": false,
diff --git a/devservices/clickhouse/config.xml b/devservices/clickhouse/config.xml
deleted file mode 100644
index 327d60661b29da..00000000000000
--- a/devservices/clickhouse/config.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-
- 0.3
-
- 1
-
-
diff --git a/devservices/config.yml b/devservices/config.yml
index e8e6a5b0d1b2be..f4cf7a263b0160 100644
--- a/devservices/config.yml
+++ b/devservices/config.yml
@@ -25,9 +25,36 @@ x-sentry-service-config:
repo_name: sentry-shared-redis
branch: main
repo_link: https://github.com/getsentry/sentry-shared-redis.git
+ symbolicator:
+ description: A symbolication service for native stacktraces and minidumps with symbol server support
+ remote:
+ repo_name: symbolicator
+ branch: master
+ repo_link: https://github.com/getsentry/symbolicator.git
+ mode: default
+ bigtable:
+ description: Bigtable emulator
+ redis-cluster:
+ description: Redis cluster used for testing
+ chartcuterie:
+ description: Chartcuterie is a service that generates charts
+ remote:
+ repo_name: chartcuterie
+ branch: master
+ repo_link: https://github.com/getsentry/chartcuterie.git
+ taskbroker:
+ description: Service used to process asynchronous tasks
+ remote:
+ repo_name: taskbroker
+ branch: main
+ repo_link: https://github.com/getsentry/taskbroker.git
+ mode: containerized
modes:
default: [snuba, postgres, relay]
migrations: [postgres, redis]
+ acceptance-ci: [postgres, snuba, chartcuterie]
+ taskbroker: [snuba, postgres, relay, taskbroker]
+ backend-ci: [snuba, postgres, redis, bigtable, redis-cluster, symbolicator]
services:
postgres:
@@ -61,6 +88,29 @@ services:
labels:
- orchestrator=devservices
restart: unless-stopped
+ bigtable:
+ image: 'ghcr.io/getsentry/cbtemulator:d28ad6b63e461e8c05084b8c83f1c06627068c04'
+ ports:
+ - '127.0.0.1:8086:8086'
+ networks:
+ - devservices
+ extra_hosts:
+ - host.docker.internal:host-gateway
+ redis-cluster:
+ image: ghcr.io/getsentry/docker-redis-cluster:7.0.10
+ ports:
+ - '127.0.0.1:7000:7000'
+ - '127.0.0.1:7001:7001'
+ - '127.0.0.1:7002:7002'
+ - '127.0.0.1:7003:7003'
+ - '127.0.0.1:7004:7004'
+ - '127.0.0.1:7005:7005'
+ networks:
+ - devservices
+ extra_hosts:
+ - host.docker.internal:host-gateway
+ environment:
+ - IP=0.0.0.0
networks:
devservices:
diff --git a/devservices/docker-compose-testing.yml b/devservices/docker-compose-testing.yml
deleted file mode 100644
index 86e6da6a05053e..00000000000000
--- a/devservices/docker-compose-testing.yml
+++ /dev/null
@@ -1,282 +0,0 @@
-x-restart-policy: &restart_policy
- restart: unless-stopped
-x-depends_on-healthy: &depends_on-healthy
- condition: service_healthy
-x-depends_on-default: &depends_on-default
- condition: service_started
-x-healthcheck-defaults: &healthcheck_defaults
- interval: 30s
- timeout: 1m30s
- retries: 10
- start_period: 10s
-services:
- redis:
- <<: *restart_policy
- container_name: sentry_redis
- image: ghcr.io/getsentry/image-mirror-library-redis:5.0-alpine
- healthcheck:
- <<: *healthcheck_defaults
- test: redis-cli ping
- command:
- [
- 'redis-server',
- '--appendonly',
- 'yes',
- '--save',
- '60',
- '20',
- '--auto-aof-rewrite-percentage',
- '100',
- '--auto-aof-rewrite-min-size',
- '64mb',
- ]
- volumes:
- - 'sentry-redis:/data'
- ports:
- - '6379:6379'
- networks:
- - sentry
- extra_hosts:
- host.docker.internal: host-gateway
- postgres:
- <<: *restart_policy
- container_name: sentry_postgres
- # Using the same postgres version as Sentry dev for consistency purposes
- image: 'ghcr.io/getsentry/image-mirror-library-postgres:14-alpine'
- healthcheck:
- <<: *healthcheck_defaults
- # Using default user "postgres" from sentry/sentry.conf.example.py or value of POSTGRES_USER if provided
- test: ['CMD-SHELL', 'pg_isready -U ${POSTGRES_USER:-postgres}']
- 'command':
- [
- 'postgres',
- '-c',
- 'wal_level=logical',
- '-c',
- 'max_replication_slots=1',
- '-c',
- 'max_wal_senders=1',
- ]
- environment:
- POSTGRES_HOST_AUTH_METHOD: 'trust'
- POSTGRES_DB: 'sentry'
- volumes:
- - 'sentry-postgres:/var/lib/postgresql/data'
- ports:
- - '5432:5432'
- networks:
- - sentry
- extra_hosts:
- host.docker.internal: host-gateway
- kafka:
- <<: *restart_policy
- image: 'ghcr.io/getsentry/image-mirror-confluentinc-cp-kafka:7.5.0'
- container_name: sentry_kafka
- environment:
- # https://docs.confluent.io/platform/current/installation/docker/config-reference.html#cp-kakfa-example
- KAFKA_PROCESS_ROLES: 'broker,controller'
- KAFKA_CONTROLLER_QUORUM_VOTERS: '1@127.0.0.1:29093'
- KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
- KAFKA_NODE_ID: '1'
- CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk'
- KAFKA_LISTENERS: 'PLAINTEXT://0.0.0.0:29092,INTERNAL://0.0.0.0:9093,EXTERNAL://0.0.0.0:9092,CONTROLLER://0.0.0.0:29093'
- KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://127.0.0.1:29092,INTERNAL://kafka:9093,EXTERNAL://127.0.0.1:9092'
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'PLAINTEXT:PLAINTEXT,INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT,CONTROLLER:PLAINTEXT'
- KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
- KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: '1'
- KAFKA_OFFSETS_TOPIC_NUM_PARTITIONS: '1'
- KAFKA_LOG_RETENTION_HOURS: '24'
- KAFKA_MESSAGE_MAX_BYTES: '50000000' #50MB or bust
- KAFKA_MAX_REQUEST_SIZE: '50000000' #50MB on requests apparently too
- volumes:
- - 'sentry-kafka:/var/lib/kafka/data'
- - 'sentry-kafka-log:/var/lib/kafka/log'
- healthcheck:
- <<: *healthcheck_defaults
- test: ['CMD-SHELL', 'nc -z localhost 9092']
- interval: 10s
- timeout: 10s
- retries: 30
- ports:
- - '9092:9092'
- - '9093:9093'
- networks:
- - sentry
- extra_hosts:
- host.docker.internal: host-gateway
- clickhouse:
- <<: *restart_policy
- container_name: sentry_clickhouse
- image: 'ghcr.io/getsentry/image-mirror-altinity-clickhouse-server:23.8.11.29.altinitystable'
- ulimits:
- nofile:
- soft: 262144
- hard: 262144
- volumes:
- - 'sentry-clickhouse:/var/lib/clickhouse'
- - 'sentry-clickhouse-log:/var/log/clickhouse-server'
- - type: bind
- read_only: true
- source: ./clickhouse/config.xml
- target: /etc/clickhouse-server/config.d/sentry.xml
- healthcheck:
- test: [
- 'CMD-SHELL',
- # Manually override any http_proxy envvar that might be set, because
- # this wget does not support no_proxy. See:
- # https://github.com/getsentry/self-hosted/issues/1537
- "http_proxy='' wget -nv -t1 --spider 'http://localhost:8123/' || exit 1",
- ]
- interval: 10s
- timeout: 10s
- retries: 30
- ports:
- - '8123:8123'
- - '9000:9000'
- - '9009:9009'
- networks:
- - sentry
- extra_hosts:
- host.docker.internal: host-gateway
- symbolicator:
- <<: *restart_policy
- container_name: sentry_symbolicator
- image: 'us-central1-docker.pkg.dev/sentryio/symbolicator/image:nightly'
- volumes:
- - 'sentry-symbolicator:/data'
- - type: bind
- read_only: true
- source: ./symbolicator
- target: /etc/symbolicator
- command: run -c /etc/symbolicator/config.yml
- ports:
- - '3021:3021'
- networks:
- - sentry
- extra_hosts:
- host.docker.internal: host-gateway
- vroom:
- <<: *restart_policy
- container_name: sentry_vroom
- image: 'us-central1-docker.pkg.dev/sentryio/vroom/vroom:latest'
- environment:
- SENTRY_KAFKA_BROKERS_PROFILING: 'sentry_kafka:9092'
- SENTRY_KAFKA_BROKERS_OCCURRENCES: 'sentry_kafka:9092'
- SENTRY_BUCKET_PROFILES: file://localhost//var/lib/sentry-profiles
- SENTRY_SNUBA_HOST: 'http://snuba-api:1218'
- volumes:
- - sentry-vroom:/var/lib/sentry-profiles
- depends_on:
- kafka:
- <<: *depends_on-healthy
- ports:
- - '8085:8085'
- networks:
- - sentry
- extra_hosts:
- host.docker.internal: host-gateway
- snuba:
- <<: *restart_policy
- container_name: sentry_snuba
- image: ghcr.io/getsentry/snuba:latest
- ports:
- - '1218:1218'
- - '1219:1219'
- networks:
- - sentry
- command: ['devserver']
- environment:
- PYTHONUNBUFFERED: '1'
- SNUBA_SETTINGS: docker
- DEBUG: '1'
- CLICKHOUSE_HOST: 'clickhouse'
- CLICKHOUSE_PORT: '9000'
- CLICKHOUSE_HTTP_PORT: '8123'
- DEFAULT_BROKERS: 'kafka:9093'
- REDIS_HOST: 'redis'
- REDIS_PORT: '6379'
- REDIS_DB: '1'
- ENABLE_SENTRY_METRICS_DEV: '${ENABLE_SENTRY_METRICS_DEV:-}'
- ENABLE_PROFILES_CONSUMER: '${ENABLE_PROFILES_CONSUMER:-}'
- ENABLE_SPANS_CONSUMER: '${ENABLE_SPANS_CONSUMER:-}'
- ENABLE_ISSUE_OCCURRENCE_CONSUMER: '${ENABLE_ISSUE_OCCURRENCE_CONSUMER:-}'
- ENABLE_AUTORUN_MIGRATION_SEARCH_ISSUES: '1'
- ENABLE_GROUP_ATTRIBUTES_CONSUMER: '${ENABLE_GROUP_ATTRIBUTES_CONSUMER:-}'
- platform: linux/amd64
- depends_on:
- - kafka
- - redis
- - clickhouse
- extra_hosts:
- host.docker.internal: host-gateway
- bigtable:
- <<: *restart_policy
- container_name: sentry_bigtable
- image: 'ghcr.io/getsentry/cbtemulator:d28ad6b63e461e8c05084b8c83f1c06627068c04'
- ports:
- - '8086:8086'
- networks:
- - sentry
- extra_hosts:
- host.docker.internal: host-gateway
- redis-cluster:
- <<: *restart_policy
- container_name: sentry_redis-cluster
- image: ghcr.io/getsentry/docker-redis-cluster:7.0.10
- ports:
- - '7000:7000'
- - '7001:7001'
- - '7002:7002'
- - '7003:7003'
- - '7004:7004'
- - '7005:7005'
- networks:
- - sentry
- volumes:
- - sentry-redis-cluster:/redis-data
- environment:
- - IP=0.0.0.0
- chartcuterie:
- <<: *restart_policy
- container_name: sentry_chartcuterie
- image: 'us-central1-docker.pkg.dev/sentryio/chartcuterie/image:latest'
- environment:
- CHARTCUTERIE_CONFIG: /etc/chartcuterie/config.js
- CHARTCUTERIE_CONFIG_POLLING: true
- volumes:
- - ./chartcuterie:/etc/chartcuterie
- ports:
- - '7901:9090'
- networks:
- - sentry
- extra_hosts:
- host.docker.internal: host-gateway
- healthcheck:
- <<: *healthcheck_defaults
- # Using default user "postgres" from sentry/sentry.conf.example.py or value of POSTGRES_USER if provided
- test:
- [
- 'CMD-SHELL',
- 'docker exec sentry_chartcuterie python3 -c "import urllib.request; urllib.request.urlopen(\"http://127.0.0.1:9090/api/chartcuterie/healthcheck/live\", timeout=5)"',
- ]
-
-volumes:
- # These store application data that should persist across restarts.
- sentry-data:
- sentry-postgres:
- sentry-redis:
- sentry-redis-cluster:
- sentry-kafka:
- sentry-clickhouse:
- sentry-symbolicator:
- # This volume stores profiles and should be persisted.
- # Not being external will still persist data across restarts.
- # It won't persist if someone does a docker compose down -v.
- sentry-vroom:
- sentry-kafka-log:
- sentry-clickhouse-log:
-
-networks:
- sentry:
- name: sentry
- external: true
diff --git a/devservices/symbolicator/config.yml b/devservices/symbolicator/config.yml
deleted file mode 100644
index 290d752a6dd04c..00000000000000
--- a/devservices/symbolicator/config.yml
+++ /dev/null
@@ -1,11 +0,0 @@
-bind: '0.0.0.0:3021'
-logging:
- level: 'debug'
- format: 'pretty'
- enable_backtraces: true
-
-# explicitly disable caches as it's not something we want in tests. in
-# development it may be less ideal. perhaps we should do the same thing as we
-# do with relay one day (one container per test/session), although that will be
-# slow
-cache_dir: null
diff --git a/eslint.config.mjs b/eslint.config.mjs
new file mode 100644
index 00000000000000..10f0a1b91fbfae
--- /dev/null
+++ b/eslint.config.mjs
@@ -0,0 +1,838 @@
+// @ts-check
+/**
+ * Understanding & making changes to this file:
+ *
+ * This is your friend:
+ * `npx eslint --inspect-config`
+ */
+import * as emotion from '@emotion/eslint-plugin';
+import prettier from 'eslint-config-prettier';
+import importPlugin from 'eslint-plugin-import';
+import jest from 'eslint-plugin-jest';
+import jestDom from 'eslint-plugin-jest-dom';
+import react from 'eslint-plugin-react';
+import reactHooks from 'eslint-plugin-react-hooks';
+import sentry from 'eslint-plugin-sentry';
+import simpleImportSort from 'eslint-plugin-simple-import-sort';
+import testingLibrary from 'eslint-plugin-testing-library';
+import typescriptSortKeys from 'eslint-plugin-typescript-sort-keys';
+import globals from 'globals';
+import invariant from 'invariant';
+// biome-ignore lint/correctness/noNodejsModules: Need to get the list of things!
+import {builtinModules} from 'node:module';
+import typescript from 'typescript-eslint';
+
+invariant(react.configs.flat, 'For typescript');
+
+const restrictedImportPatterns = [
+ {
+ group: ['sentry/components/devtoolbar/*'],
+ message: 'Do not depend on toolbar internals',
+ },
+];
+
+const restrictedImportPaths = [
+ {
+ name: '@testing-library/react',
+ message:
+ 'Please import from `sentry-test/reactTestingLibrary` instead so that we can ensure consistency throughout the codebase',
+ },
+ {
+ name: '@testing-library/react-hooks',
+ message:
+ 'Please import from `sentry-test/reactTestingLibrary` instead so that we can ensure consistency throughout the codebase',
+ },
+ {
+ name: '@testing-library/user-event',
+ message:
+ 'Please import from `sentry-test/reactTestingLibrary` instead so that we can ensure consistency throughout the codebase',
+ },
+ {
+ name: '@sentry/browser',
+ message:
+ 'Please import from `@sentry/react` to ensure consistency throughout the codebase.',
+ },
+ {
+ name: 'marked',
+ message:
+      "Please import marked from 'app/utils/marked' so that we can ensure sanitization of marked output",
+ },
+ {
+ name: 'lodash',
+ message:
+      "Please import lodash utilities individually. e.g. `import isEqual from 'lodash/isEqual';`. See https://github.com/getsentry/frontend-handbook#lodash for more information",
+ },
+ {
+ name: 'lodash/get',
+ message:
+ 'Optional chaining `?.` and nullish coalescing operators `??` are available and preferred over using `lodash/get`. See https://github.com/getsentry/frontend-handbook#new-syntax for more information',
+ },
+ {
+ name: 'sentry/utils/theme',
+ importNames: ['lightColors', 'darkColors'],
+ message:
+ "'lightColors' and 'darkColors' exports intended for use in Storybook only. Instead, use theme prop from emotion or the useTheme hook.",
+ },
+ {
+ name: 'react-router',
+ importNames: ['withRouter'],
+ message:
+ "Use 'useLocation', 'useParams', 'useNavigate', 'useRoutes' from sentry/utils instead.",
+ },
+ {
+ name: 'sentry/utils/withSentryRouter',
+ message:
+ "Use 'useLocation', 'useParams', 'useNavigate', 'useRoutes' from sentry/utils instead.",
+ },
+ {
+ name: 'qs',
+ message: 'Please use query-string instead of qs',
+ },
+ {
+ name: 'moment',
+ message: 'Please import moment-timezone instead of moment',
+ },
+];
+
+// Used by both: `languageOptions` & `parserOptions`
+const ecmaVersion = 6; // TODO(ryan953): change to 'latest'
+
+/**
+ * To get started with this ESLint Configuration list be sure to read at least
+ * these sections of the docs:
+ * - https://eslint.org/docs/latest/use/configure/configuration-files#specifying-files-and-ignores
+ * - https://eslint.org/docs/latest/use/configure/configuration-files#cascading-configuration-objects
+ */
+
+export default typescript.config([
+ {
+ // Main parser & linter options
+ // Rules are defined below and inherit these properties
+ // https://eslint.org/docs/latest/use/configure/configuration-files#configuration-objects
+ name: 'eslint/global/languageOptions',
+ languageOptions: {
+ ecmaVersion,
+ sourceType: 'module',
+ globals: {
+ // TODO(ryan953): globals.browser seems to have a bug with trailing whitespace
+ ...Object.fromEntries(
+ Object.keys(globals.browser).map(key => [key.trim(), false])
+ ),
+ ...globals.jest,
+ MockApiClient: true,
+ tick: true,
+ },
+ parser: typescript.parser,
+ parserOptions: {
+ ecmaFeatures: {
+ globalReturn: false,
+ },
+ ecmaVersion,
+
+ // https://typescript-eslint.io/packages/parser/#emitdecoratormetadata
+ emitDecoratorMetadata: undefined,
+
+ // https://typescript-eslint.io/packages/parser/#experimentaldecorators
+ experimentalDecorators: undefined,
+
+ // https://typescript-eslint.io/packages/parser/#jsdocparsingmode
+ jsDocParsingMode: process.env.SENTRY_DETECT_DEPRECATIONS ? 'all' : 'none',
+
+ // https://typescript-eslint.io/packages/parser/#project
+ project: process.env.SENTRY_DETECT_DEPRECATIONS ? './tsconfig.json' : false,
+
+ // https://typescript-eslint.io/packages/parser/#projectservice
+ // `projectService` is recommended, but slower, with our current tsconfig files.
+ // projectService: true,
+ // tsconfigRootDir: import.meta.dirname,
+ },
+ },
+ linterOptions: {
+ noInlineConfig: false,
+ reportUnusedDisableDirectives: 'error',
+ },
+ settings: {
+ react: {
+ version: '18.2.0',
+ defaultVersion: '18.2',
+ },
+ 'import/parsers': {'@typescript-eslint/parser': ['.ts', '.tsx']},
+ 'import/resolver': {typescript: {}},
+ 'import/extensions': ['.js', '.jsx'],
+ },
+ },
+ {
+ name: 'eslint/global/files',
+ // Default file selection
+ // https://eslint.org/docs/latest/use/configure/configuration-files#specifying-files-and-ignores
+ files: ['**/*.js', '**/*.mjs', '**/*.ts', '**/*.jsx', '**/*.tsx'],
+ },
+ {
+ name: 'eslint/global/ignores',
+ // Global ignores
+ // https://eslint.org/docs/latest/use/configure/configuration-files#globally-ignoring-files-with-ignores
+ ignores: [
+ '.devenv/**/*',
+ '.github/**/*',
+ '.mypy_cache/**/*',
+ '.pytest_cache/**/*',
+ '.venv/**/*',
+ '**/*.benchmark.ts',
+ '**/*.d.ts',
+ '**/dist/**/*',
+ '**/tests/**/fixtures/**/*',
+ '**/vendor/**/*',
+ 'build-utils/**/*',
+ 'config/chartcuterie/config.js', // TODO: see if this file exists
+ 'fixtures/artifact_bundle/**/*',
+ 'fixtures/artifact_bundle_debug_ids/**/*',
+ 'fixtures/artifact_bundle_duplicated_debug_ids/**/*',
+ 'fixtures/profiles/embedded.js',
+ 'jest.config.ts',
+ 'api-docs/**/*',
+ 'src/sentry/static/sentry/js/**/*',
+ 'src/sentry/templates/sentry/**/*',
+ 'stylelint.config.js',
+ ],
+ },
+ /**
+ * Global Rules
+ * Any ruleset that does not include `files` or `ignores` fields
+ *
+ * Plugins are configured within each configuration object.
+ * https://eslint.org/docs/latest/use/configure/configuration-files#configuration-objects
+ *
+ * Rules are grouped by plugin. If you want to override a specific rule inside
+ * the recommended set, then it's recommended to spread the new rule on top
+ * of the predefined ones.
+ *
+ * For example: if you want to enable a new plugin in the codebase and their
+ * recommended rules (or a new rule that's part of an existing plugin)
+ * First you'd setup a configuration object for that plugin:
+ * {
+ * name: 'my-plugin/recommended',
+ * ...myPlugin.configs.recommended,
+ * },
+ * Second you'd override the rule you want to deal with, maybe making it a
+ * warning to start:
+ * {
+ * name: 'my-plugin/recommended',
+ * ...myPlugin.configs.recommended,
+ * rules: {
+ * ...myPlugin.configs.recommended.rules,
+ * ['the-rule']: 'warn',
+ * }
+ * },
+ * Finally, once all warnings are fixed, update from 'warning' to 'error', or
+ * remove the override and rely on the recommended rules again.
+ */
+ {
+ name: 'eslint/rules',
+ rules: {
+ // https://eslint.org/docs/rules/strict
+ strict: ['error', 'global'],
+
+ /**
+ * Variables
+ */
+ // https://eslint.org/docs/rules/no-shadow-restricted-names
+ 'no-shadow-restricted-names': 'error',
+
+ /**
+ * Possible errors
+ */
+ // https://eslint.org/docs/rules/no-cond-assign
+ 'no-cond-assign': ['error', 'always'],
+
+ // https://eslint.org/docs/rules/no-alert
+ 'no-alert': 'error',
+
+ // https://eslint.org/docs/rules/no-constant-condition
+ 'no-constant-condition': 'warn',
+
+ // https://eslint.org/docs/rules/no-empty
+ 'no-empty': 'error',
+
+ // https://eslint.org/docs/rules/no-ex-assign
+ 'no-ex-assign': 'error',
+
+ // https://eslint.org/docs/rules/no-extra-boolean-cast
+ 'no-extra-boolean-cast': 'error',
+
+ // https://eslint.org/docs/rules/no-func-assign
+ 'no-func-assign': 'error',
+
+ // https://eslint.org/docs/rules/no-inner-declarations
+ 'no-inner-declarations': 'error',
+
+ // https://eslint.org/docs/rules/no-invalid-regexp
+ 'no-invalid-regexp': 'error',
+
+ // https://eslint.org/docs/rules/no-irregular-whitespace
+ 'no-irregular-whitespace': 'error',
+
+ // https://eslint.org/docs/rules/no-obj-calls
+ 'no-obj-calls': 'error',
+
+ // https://eslint.org/docs/rules/no-sparse-arrays
+ 'no-sparse-arrays': 'error',
+
+ // https://eslint.org/docs/rules/block-scoped-var
+ 'block-scoped-var': 'error',
+
+ /**
+ * Best practices
+ */
+ // https://eslint.org/docs/rules/consistent-return
+ 'consistent-return': 'error',
+
+ // https://eslint.org/docs/rules/default-case
+ 'default-case': 'error',
+
+ // https://eslint.org/docs/rules/dot-notation
+ 'dot-notation': ['error', {allowKeywords: true}],
+
+ // https://eslint.org/docs/rules/guard-for-in [REVISIT ME]
+ 'guard-for-in': 'off',
+
+ // https://eslint.org/docs/rules/no-caller
+ 'no-caller': 'error',
+
+ // https://eslint.org/docs/rules/no-eval
+ 'no-eval': 'error',
+
+ // https://eslint.org/docs/rules/no-extend-native
+ 'no-extend-native': 'error',
+
+ // https://eslint.org/docs/rules/no-extra-bind
+ 'no-extra-bind': 'error',
+
+ // https://eslint.org/docs/rules/no-fallthrough
+ 'no-fallthrough': 'error',
+
+ // https://eslint.org/docs/rules/no-floating-decimal
+ 'no-floating-decimal': 'error',
+
+ // https://eslint.org/docs/rules/no-implied-eval
+ 'no-implied-eval': 'error',
+
+ // https://eslint.org/docs/rules/no-lone-blocks
+ 'no-lone-blocks': 'error',
+
+ // https://eslint.org/docs/rules/no-loop-func
+ 'no-loop-func': 'error',
+
+ // https://eslint.org/docs/rules/no-multi-str
+ 'no-multi-str': 'error',
+
+ // https://eslint.org/docs/rules/no-native-reassign
+ 'no-native-reassign': 'error',
+
+ // https://eslint.org/docs/rules/no-new
+ 'no-new': 'error',
+
+ // https://eslint.org/docs/rules/no-new-func
+ 'no-new-func': 'error',
+
+ // https://eslint.org/docs/rules/no-new-wrappers
+ 'no-new-wrappers': 'error',
+
+ // https://eslint.org/docs/rules/no-octal
+ 'no-octal': 'error',
+
+ // https://eslint.org/docs/rules/no-octal-escape
+ 'no-octal-escape': 'error',
+
+ // https://eslint.org/docs/rules/no-param-reassign [REVISIT ME]
+ 'no-param-reassign': 'off',
+
+ // https://eslint.org/docs/rules/no-proto
+ 'no-proto': 'error',
+
+ // https://eslint.org/docs/rules/no-return-assign
+ 'no-return-assign': 'error',
+
+ // https://eslint.org/docs/rules/no-script-url
+ 'no-script-url': 'error',
+
+ // https://eslint.org/docs/rules/no-self-compare
+ 'no-self-compare': 'error',
+
+ // https://eslint.org/docs/rules/no-sequences
+ 'no-sequences': 'error',
+
+ // https://eslint.org/docs/rules/no-throw-literal
+ 'no-throw-literal': 'error',
+
+ // https://eslint.org/docs/rules/no-with
+ 'no-with': 'error',
+
+ // https://eslint.org/docs/rules/radix
+ radix: 'error',
+
+ // https://eslint.org/docs/rules/object-shorthand
+ 'object-shorthand': ['error', 'properties'],
+
+ // https://eslint.org/docs/rules/vars-on-top
+ 'vars-on-top': 'off',
+
+ // https://eslint.org/docs/rules/wrap-iife
+ 'wrap-iife': ['error', 'any'],
+
+ // https://eslint.org/docs/rules/array-callback-return
+ 'array-callback-return': 'error',
+
+ // https://eslint.org/docs/rules/yoda
+ yoda: 'error',
+
+ // https://eslint.org/docs/rules/no-else-return
+ 'no-else-return': ['error', {allowElseIf: false}],
+
+ // https://eslint.org/docs/rules/require-await
+ 'require-await': 'error',
+
+ // https://eslint.org/docs/rules/multiline-comment-style
+ 'multiline-comment-style': ['error', 'separate-lines'],
+
+ // https://eslint.org/docs/rules/spaced-comment
+ 'spaced-comment': [
+ 'error',
+ 'always',
+ {
+ line: {markers: ['/'], exceptions: ['-', '+']},
+ block: {exceptions: ['*'], balanced: true},
+ },
+ ],
+
+ // Let formatter handle this
+ 'arrow-body-style': 'off',
+
+ /**
+ * Restricted imports, e.g. deprecated libraries, etc
+ *
+ * See: https://eslint.org/docs/rules/no-restricted-imports
+ */
+ 'no-restricted-imports': [
+ 'error',
+ {
+ patterns: restrictedImportPatterns,
+ paths: restrictedImportPaths,
+ },
+ ],
+
+ // https://eslint.org/docs/rules/no-console
+ 'no-console': 'error',
+ },
+ },
+ {
+ ...importPlugin.flatConfigs.recommended,
+ name: 'plugin/import',
+ rules: {
+ // We override all the rules that are in the recommended, react, and typescript rulesets
+
+ // From the recommended ruleset:
+ // https://github.com/import-js/eslint-plugin-import/blob/main/docs/rules/export.md
+ 'import/export': 'error',
+
+ // 5 rules not recommended to be enabled with typescript-eslint
+ // https://typescript-eslint.io/troubleshooting/typed-linting/performance/#slow-eslint-rules
+ 'import/named': 'off',
+ 'import/namespace': 'off',
+ 'import/default': 'off',
+ 'import/no-named-as-default-member': 'off',
+ 'import/no-unresolved': 'off',
+
+ // Require a newline after the last import/require in a group
+ // Why doesn't prettier handle this? https://prettier.io/docs/en/rationale.html#empty-lines
+ // https://github.com/benmosher/eslint-plugin-import/blob/main/docs/rules/newline-after-import.md
+ 'import/newline-after-import': 'error',
+
+ // do not allow a default import name to match a named export (airbnb: error)
+ // https://github.com/benmosher/eslint-plugin-import/blob/main/docs/rules/no-named-as-default.md
+ 'import/no-named-as-default': 'off',
+
+ // Prevent importing the default as if it were named
+ // https://github.com/benmosher/eslint-plugin-import/blob/main/docs/rules/no-named-default.md
+ 'import/no-named-default': 'error',
+
+ // disallow AMD require/define
+ // https://github.com/benmosher/eslint-plugin-import/blob/main/docs/rules/no-amd.md
+ 'import/no-amd': 'error',
+
+ // disallow duplicate imports
+ // https://github.com/benmosher/eslint-plugin-import/blob/main/docs/rules/no-duplicates.md
+ 'import/no-duplicates': 'error',
+
+ // Forbid import of modules using absolute paths
+ // https://github.com/benmosher/eslint-plugin-import/blob/main/docs/rules/no-absolute-path.md
+ 'import/no-absolute-path': 'error',
+
+ // Forbid Webpack loader syntax in imports
+ // https://github.com/benmosher/eslint-plugin-import/blob/main/docs/rules/no-webpack-loader-syntax.md
+ 'import/no-webpack-loader-syntax': 'error',
+
+    // Reports if a module's default export is unnamed
+ // https://github.com/benmosher/eslint-plugin-import/blob/main/docs/rules/no-anonymous-default-export.md
+ 'import/no-anonymous-default-export': 'error',
+ },
+ },
+ {
+ name: 'plugin/react',
+ plugins: {
+ ...react.configs.flat.recommended.plugins,
+ // @ts-ignore noUncheckedIndexedAccess
+ ...react.configs.flat['jsx-runtime'].plugins,
+ },
+ rules: {
+ ...react.configs.flat.recommended.rules,
+ // @ts-ignore noUncheckedIndexedAccess
+ ...react.configs.flat['jsx-runtime'].rules,
+
+ // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/display-name.md
+ 'react/display-name': 'off',
+
+ // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-multi-comp.md
+ 'react/no-multi-comp': ['off', {ignoreStateless: true}],
+
+ // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-fragments.md
+ 'react/jsx-fragments': ['error', 'element'],
+
+ // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-handler-names.md
+ // Ensures that any component or prop methods used to handle events are correctly prefixed.
+ 'react/jsx-handler-names': [
+ 'off',
+ {eventHandlerPrefix: 'handle', eventHandlerPropPrefix: 'on'},
+ ],
+
+ // Disabled as we use the newer JSX transform babel plugin.
+ // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-uses-react.md
+ 'react/jsx-uses-react': 'off',
+
+ // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-did-mount-set-state.md
+ 'react/no-did-mount-set-state': 'error',
+
+      // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-did-update-set-state.md
+ 'react/no-did-update-set-state': 'error',
+
+ // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-redundant-should-component-update.md
+ 'react/no-redundant-should-component-update': 'error',
+
+ // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-typos.md
+ 'react/no-typos': 'error',
+
+ // Prevent invalid characters from appearing in markup
+ // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-unescaped-entities.md
+ 'react/no-unescaped-entities': 'off',
+
+ // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-unknown-property.md
+ 'react/no-unknown-property': ['error', {ignore: ['css']}],
+
+ // We do not need proptypes since we're using typescript
+ 'react/prop-types': 'off',
+
+ // Disabled as we are using the newer JSX transform babel plugin.
+ // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/react-in-jsx-scope.md
+ 'react/react-in-jsx-scope': 'off',
+
+ // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/self-closing-comp.md
+ 'react/self-closing-comp': 'error',
+
+ // This also causes issues with typescript
+ // See: https://github.com/yannickcr/eslint-plugin-react/issues/2066
+ //
+ // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/sort-comp.md
+ 'react/sort-comp': 'warn',
+
+ // Consistent (never add ={true})
+ // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-boolean-value.md
+ 'react/jsx-boolean-value': ['error', 'never'],
+
+ // Consistent function component declaration styles
+ // https://github.com/jsx-eslint/eslint-plugin-react/blob/master/docs/rules/function-component-definition.md
+ 'react/function-component-definition': [
+ 'error',
+ {namedComponents: 'function-declaration'},
+ ],
+ },
+ },
+ {
+ name: 'plugin/react-hooks',
+ plugins: {'react-hooks': reactHooks},
+ rules: {
+ 'react-hooks/rules-of-hooks': 'error',
+ 'react-hooks/exhaustive-deps': [
+ 'error',
+ {additionalHooks: '(useEffectAfterFirstRender|useMemoWithPrevious)'},
+ ],
+ },
+ },
+ {
+ name: 'plugin/@typescript-eslint',
+ plugins: {'@typescript-eslint': typescript.plugin},
+ rules: {
+ // no-undef is redundant with typescript as tsc will complain
+ // A downside is that we won't get eslint errors about it, but your editors should
+ // support tsc errors so....
+ // https://eslint.org/docs/rules/no-undef
+ 'no-undef': 'off',
+
+ /**
+ * Need to use typescript version of these rules
+ * https://eslint.org/docs/rules/no-shadow
+ */
+ 'no-shadow': 'off',
+ '@typescript-eslint/no-shadow': 'error',
+
+ // This only override the `args` rule (which is "none"). There are too many errors and it's difficult to manually
+ // fix them all, so we'll have to incrementally update.
+ // https://eslint.org/docs/rules/no-unused-vars
+ 'no-unused-vars': 'off',
+ '@typescript-eslint/no-unused-vars': [
+ 'error',
+ {
+ vars: 'all',
+ args: 'all',
+ // TODO(scttcper): We could enable this to enforce catch (error)
+ // https://eslint.org/docs/latest/rules/no-unused-vars#caughterrors
+ caughtErrors: 'none',
+
+ // Ignore vars that start with an underscore
+ // e.g. if you want to omit a property using object spread:
+ //
+ // const {name: _name, ...props} = this.props;
+ //
+ varsIgnorePattern: '^_',
+ argsIgnorePattern: '^_',
+ destructuredArrayIgnorePattern: '^_',
+ },
+ ],
+
+ // https://eslint.org/docs/rules/no-use-before-define
+ 'no-use-before-define': 'off',
+ // This seems to have been turned on while previously it had been off
+ '@typescript-eslint/no-use-before-define': 'off',
+
+ // https://github.com/xojs/eslint-config-xo-typescript/blob/9791a067d6a119a21a4db72c02f1da95e25ffbb6/index.js#L95
+ '@typescript-eslint/no-restricted-types': [
+ 'error',
+ {
+ types: {
+ // TODO(scttcper): Turn object on to make our types more strict
+ // object: {
+ // message: 'The `object` type is hard to use. Use `Record` instead. See: https://github.com/typescript-eslint/typescript-eslint/pull/848',
+ // fixWith: 'Record'
+ // },
+ Buffer: {
+ message:
+ 'Use Uint8Array instead. See: https://sindresorhus.com/blog/goodbye-nodejs-buffer',
+ suggest: ['Uint8Array'],
+ },
+ '[]': "Don't use the empty array type `[]`. It only allows empty arrays. Use `SomeType[]` instead.",
+ '[[]]':
+ "Don't use `[[]]`. It only allows an array with a single element which is an empty array. Use `SomeType[][]` instead.",
+ '[[[]]]': "Don't use `[[[]]]`. Use `SomeType[][][]` instead.",
+ },
+ },
+ ],
+ // TODO(scttcper): Turn no-empty-object-type on to make our types more strict
+ // '@typescript-eslint/no-empty-object-type': 'error',
+ // TODO(scttcper): Turn no-function on to make our types more strict
+ // '@typescript-eslint/no-unsafe-function-type': 'error',
+ '@typescript-eslint/no-wrapper-object-types': 'error',
+
+ // Naming convention enforcements
+ '@typescript-eslint/naming-convention': [
+ 'error',
+ {
+ selector: 'typeLike',
+ format: ['PascalCase'],
+ leadingUnderscore: 'allow',
+ },
+ {
+ selector: 'enumMember',
+ format: ['UPPER_CASE'],
+ },
+ ],
+ },
+ },
+ {
+ name: 'plugin/@typescript-eslint && process.env.SENTRY_DETECT_DEPRECATIONS',
+ rules: {
+ '@typescript-eslint/no-deprecated': process.env.SENTRY_DETECT_DEPRECATIONS
+ ? 'error'
+ : 'off',
+ },
+ },
+ {
+ name: 'plugin/typescript-sort-keys',
+ plugins: {'typescript-sort-keys': typescriptSortKeys},
+ rules: {
+ 'typescript-sort-keys/interface': [
+ 'error',
+ 'asc',
+ {caseSensitive: true, natural: false, requiredFirst: true},
+ ],
+ },
+ },
+ {
+ name: 'plugin/simple-import-sort',
+ plugins: {'simple-import-sort': simpleImportSort},
+ rules: {
+ /**
+ * Better import sorting
+ */
+ 'sort-imports': 'off',
+ 'simple-import-sort/imports': [
+ 'error',
+ {
+ groups: [
+ // Side effect imports.
+ ['^\\u0000'],
+
+ // Node.js builtins.
+ [`^(${builtinModules.join('|')})(/|$)`],
+
+ // Packages. `react` related packages come first.
+ ['^react', '^@?\\w'],
+
+ // Test should be separate from the app
+ ['^(sentry-test|getsentry-test)(/.*|$)'],
+
+ // Internal packages.
+ ['^(sentry-locale|sentry-images)(/.*|$)'],
+
+ ['^(getsentry-images)(/.*|$)'],
+
+ ['^(app|sentry)(/.*|$)'],
+
+ // Getsentry packages.
+ ['^(admin|getsentry)(/.*|$)'],
+
+ // Style imports.
+ ['^.+\\.less$'],
+
+ // Parent imports. Put `..` last.
+ ['^\\.\\.(?!/?$)', '^\\.\\./?$'],
+
+ // Other relative imports. Put same-folder imports and `.` last.
+ ['^\\./(?=.*/)(?!/?$)', '^\\.(?!/?$)', '^\\./?$'],
+ ],
+ },
+ ],
+ },
+ },
+ {
+ name: 'plugin/sentry',
+ plugins: {sentry},
+ rules: {
+ 'sentry/no-digits-in-tn': 'error',
+ 'sentry/no-dynamic-translations': 'error',
+ 'sentry/no-styled-shortcut': 'error',
+ },
+ },
+ {
+ name: 'plugin/@emotion',
+ plugins: {'@emotion': emotion},
+ rules: {
+ '@emotion/import-from-emotion': 'off', // Not needed, in v11 we import from @emotion/react
+ '@emotion/jsx-import': 'off', // Not needed, handled by babel
+ '@emotion/no-vanilla': 'error',
+ '@emotion/pkg-renaming': 'off', // Not needed, we have migrated to v11 and the old package names cannot be used anymore
+ '@emotion/styled-import': 'error',
+ '@emotion/syntax-preference': ['off', 'string'], // TODO(ryan953): Enable this so `css={css``}` is required
+ },
+ },
+ {
+ name: 'plugin/jest',
+ files: ['**/*.spec.{ts,js,tsx,jsx}', 'tests/js/**/*.{ts,js,tsx,jsx}'],
+ plugins: jest.configs['flat/recommended'].plugins,
+ rules: {
+ ...jest.configs['flat/recommended'].rules,
+ ...jest.configs['flat/style'].rules,
+
+ // `recommended` set this to warn, we've upgraded to error
+ 'jest/no-disabled-tests': 'error',
+
+ // `recommended` set this to warn, we've downgraded to off
+ // Disabled as we have many tests which render as simple validations
+ 'jest/expect-expect': 'off',
+
+ // Disabled as we have some comment out tests that cannot be
+ // uncommented due to typescript errors.
+ 'jest/no-commented-out-tests': 'off', // TODO(ryan953): Fix violations then delete this line
+
+ // Disabled as we do sometimes have conditional expects
+ 'jest/no-conditional-expect': 'off', // TODO(ryan953): Fix violations then delete this line
+
+ // We don't recommend snapshots, but if there are any keep it small
+ 'jest/no-large-snapshots': ['error', {maxSize: 2000}],
+ },
+ },
+ {
+ name: 'plugin/jest-dom',
+ files: ['**/*.spec.{ts,js,tsx,jsx}', 'tests/js/**/*.{ts,js,tsx,jsx}'],
+ ...jestDom.configs['flat/recommended'],
+ },
+ {
+ name: 'plugin/testing-library',
+ files: ['**/*.spec.{ts,js,tsx,jsx}', 'tests/js/**/*.{ts,js,tsx,jsx}'],
+ ...testingLibrary.configs['flat/react'],
+ rules: {
+ ...testingLibrary.configs['flat/react'].rules,
+ 'testing-library/render-result-naming-convention': 'off',
+ 'testing-library/no-unnecessary-act': 'off',
+ },
+ },
+ {
+ name: 'plugin/prettier',
+ ...prettier,
+ },
+ {
+ name: 'files/devtoolbar',
+ files: ['static/app/components/devtoolbar/**/*.{ts,tsx}'],
+ rules: {
+ 'no-restricted-imports': [
+ 'error',
+ {
+ paths: [
+ ...restrictedImportPaths,
+ {
+ name: 'sentry/utils/queryClient',
+ message:
+ 'Import from `@tanstack/react-query` and `./hooks/useFetchApiData` or `./hooks/useFetchInfiniteApiData` instead.',
+ },
+ ],
+ },
+ ],
+ },
+ },
+ {
+ name: 'files/sentry-test',
+ files: ['**/*.spec.{ts,js,tsx,jsx}', 'tests/js/**/*.{ts,js,tsx,jsx}'],
+ rules: {
+ 'no-restricted-imports': [
+ 'error',
+ {
+ patterns: restrictedImportPatterns,
+ paths: [
+ ...restrictedImportPaths,
+ {
+ name: 'sentry/locale',
+ message: 'Translations are not needed in tests.',
+ },
+ ],
+ },
+ ],
+ },
+ },
+ {
+ // We specify rules explicitly for the sdk-loader here so we do not have
+ // eslint ignore comments included in the source file, which is consumed
+ // by users.
+ name: 'files/js-sdk-loader.ts',
+ files: ['**/js-sdk-loader.ts'],
+ rules: {
+ 'no-console': 'off',
+ },
+ },
+]);
diff --git a/fixtures/backup/model_dependencies/detailed.json b/fixtures/backup/model_dependencies/detailed.json
index 36a9e3dbd3f9f0..fa37af84c3316f 100644
--- a/fixtures/backup/model_dependencies/detailed.json
+++ b/fixtures/backup/model_dependencies/detailed.json
@@ -3223,22 +3223,6 @@
]
]
},
- "sentry.metricskeyindexer": {
- "dangling": false,
- "foreign_keys": {},
- "model": "sentry.metricskeyindexer",
- "relocation_dependencies": [],
- "relocation_scope": "Excluded",
- "silos": [
- "Region"
- ],
- "table_name": "sentry_metricskeyindexer",
- "uniques": [
- [
- "string"
- ]
- ]
- },
"sentry.monitor": {
"dangling": false,
"foreign_keys": {
@@ -6242,6 +6226,34 @@
]
]
},
+ "tempest.tempestcredentials": {
+ "dangling": false,
+ "foreign_keys": {
+ "created_by_id": {
+ "kind": "HybridCloudForeignKey",
+ "model": "sentry.user",
+ "nullable": true
+ },
+ "project": {
+ "kind": "FlexibleForeignKey",
+ "model": "sentry.project",
+ "nullable": false
+ }
+ },
+ "model": "tempest.tempestcredentials",
+ "relocation_dependencies": [],
+ "relocation_scope": "Excluded",
+ "silos": [
+ "Region"
+ ],
+ "table_name": "tempest_tempestcredentials",
+ "uniques": [
+ [
+ "client_id",
+ "project"
+ ]
+ ]
+ },
"uptime.projectuptimesubscription": {
"dangling": false,
"foreign_keys": {
@@ -6302,6 +6314,26 @@
]
]
},
+ "uptime.uptimesubscriptionregion": {
+ "dangling": false,
+ "foreign_keys": {
+ "uptime_subscription": {
+ "kind": "FlexibleForeignKey",
+ "model": "uptime.uptimesubscription",
+ "nullable": false
+ }
+ },
+ "model": "uptime.uptimesubscriptionregion",
+ "relocation_dependencies": [],
+ "relocation_scope": "Excluded",
+ "silos": [
+ "Region"
+ ],
+ "table_name": "uptime_uptimesubscriptionregion",
+ "uniques": [
+ []
+ ]
+ },
"workflow_engine.action": {
"dangling": false,
"foreign_keys": {
@@ -6691,4 +6723,4 @@
]
]
}
-}
\ No newline at end of file
+}
diff --git a/fixtures/backup/model_dependencies/flat.json b/fixtures/backup/model_dependencies/flat.json
index a0d8ea7b429b94..dbab6290805ea0 100644
--- a/fixtures/backup/model_dependencies/flat.json
+++ b/fixtures/backup/model_dependencies/flat.json
@@ -447,7 +447,6 @@
"sentry.lostpasswordhash": [
"sentry.user"
],
- "sentry.metricskeyindexer": [],
"sentry.monitor": [
"sentry.organization",
"sentry.project",
@@ -857,6 +856,10 @@
"social_auth.usersocialauth": [
"sentry.user"
],
+ "tempest.tempestcredentials": [
+ "sentry.project",
+ "sentry.user"
+ ],
"uptime.projectuptimesubscription": [
"sentry.environment",
"sentry.project",
@@ -865,6 +868,9 @@
"uptime.uptimesubscription"
],
"uptime.uptimesubscription": [],
+ "uptime.uptimesubscriptionregion": [
+ "uptime.uptimesubscription"
+ ],
"workflow_engine.action": [
"sentry.integration"
],
@@ -924,4 +930,4 @@
"workflow_engine.dataconditiongroup",
"workflow_engine.workflow"
]
-}
\ No newline at end of file
+}
diff --git a/fixtures/backup/model_dependencies/sorted.json b/fixtures/backup/model_dependencies/sorted.json
index 35b2892abcf3a4..dd666a1a710ca4 100644
--- a/fixtures/backup/model_dependencies/sorted.json
+++ b/fixtures/backup/model_dependencies/sorted.json
@@ -18,7 +18,6 @@
"sentry.identityprovider",
"sentry.integration",
"sentry.integrationfeature",
- "sentry.metricskeyindexer",
"sentry.monitorlocation",
"sentry.option",
"sentry.organization",
@@ -49,6 +48,7 @@
"sentry.userroleuser",
"social_auth.usersocialauth",
"uptime.uptimesubscription",
+ "uptime.uptimesubscriptionregion",
"workflow_engine.action",
"workflow_engine.dataconditiongroup",
"workflow_engine.dataconditiongroupaction",
@@ -117,6 +117,7 @@
"workflow_engine.detector",
"workflow_engine.datasourcedetector",
"uptime.projectuptimesubscription",
+ "tempest.tempestcredentials",
"sentry.userreport",
"sentry.useroption",
"sentry.useremail",
@@ -246,4 +247,4 @@
"sentry.incidentsnapshot",
"sentry.incidentproject",
"sentry.incidentactivity"
-]
\ No newline at end of file
+]
diff --git a/fixtures/backup/model_dependencies/truncate.json b/fixtures/backup/model_dependencies/truncate.json
index 39ed0286c7e715..415a6f5e10ec8b 100644
--- a/fixtures/backup/model_dependencies/truncate.json
+++ b/fixtures/backup/model_dependencies/truncate.json
@@ -18,7 +18,6 @@
"sentry_identityprovider",
"sentry_integration",
"sentry_integrationfeature",
- "sentry_metricskeyindexer",
"sentry_monitorlocation",
"sentry_option",
"sentry_organization",
@@ -49,6 +48,7 @@
"sentry_userrole_users",
"social_auth_usersocialauth",
"uptime_uptimesubscription",
+ "uptime_uptimesubscriptionregion",
"workflow_engine_action",
"workflow_engine_dataconditiongroup",
"workflow_engine_dataconditiongroupaction",
@@ -117,6 +117,7 @@
"workflow_engine_detector",
"workflow_engine_datasourcedetector",
"uptime_projectuptimesubscription",
+ "tempest_tempestcredentials",
"sentry_userreport",
"sentry_useroption",
"sentry_useremail",
@@ -246,4 +247,4 @@
"sentry_incidentsnapshot",
"sentry_incidentproject",
"sentry_incidentactivity"
-]
\ No newline at end of file
+]
diff --git a/fixtures/sdk_crash_detection/crash_event_android.py b/fixtures/sdk_crash_detection/crash_event_android.py
index bf6634215561a9..d7990c3ecb4cfb 100644
--- a/fixtures/sdk_crash_detection/crash_event_android.py
+++ b/fixtures/sdk_crash_detection/crash_event_android.py
@@ -96,6 +96,22 @@ def get_apex_crash_event(
)
+def get_exception(
+ frames: Sequence[Mapping[str, str]],
+ mechanism=None,
+) -> dict[str, object]:
+ if mechanism is None:
+        # Assigned here instead of as a default parameter: mutable default
+        # arguments are shared across calls, which the linter flags.
+ mechanism = {"type": "onerror", "handled": False}
+ return {
+ "type": "IllegalArgumentException",
+ "value": "SDK Crash",
+ "module": "java.lang",
+ "stacktrace": {"frames": frames},
+ "mechanism": mechanism,
+ }
+
+
def get_crash_event_with_frames(frames: Sequence[Mapping[str, str]], **kwargs) -> dict[str, object]:
result = {
"event_id": "0a52a8331d3b45089ebd74f8118d4fa1",
@@ -103,17 +119,7 @@ def get_crash_event_with_frames(frames: Sequence[Mapping[str, str]], **kwargs) -
"dist": "2",
"platform": "java",
"environment": "debug",
- "exception": {
- "values": [
- {
- "type": "IllegalArgumentException",
- "value": "SDK Crash",
- "module": "java.lang",
- "stacktrace": {"frames": frames},
- "mechanism": {"type": "onerror", "handled": False},
- }
- ]
- },
+ "exception": {"values": [get_exception(frames)]},
"key_id": "1336851",
"level": "fatal",
"contexts": {
diff --git a/fixtures/sdk_crash_detection/crash_event_react_native.py b/fixtures/sdk_crash_detection/crash_event_react_native.py
index a700e30f46c1c8..8884cb115a600c 100644
--- a/fixtures/sdk_crash_detection/crash_event_react_native.py
+++ b/fixtures/sdk_crash_detection/crash_event_react_native.py
@@ -46,6 +46,18 @@ def get_frames(filename: str) -> Sequence[MutableMapping[str, str]]:
return frames
+def get_exception(
+ frames: Sequence[Mapping[str, str]],
+ mechanism_type: str = "onerror",
+) -> dict[str, object]:
+ return {
+ "type": "Error",
+ "value": "Uncaught Thrown Error",
+ "stacktrace": {"frames": frames},
+ "mechanism": {"type": mechanism_type, "handled": False},
+ }
+
+
def get_crash_event(
filename="/Users/sentry.user/git-repos/sentry-react-native/dist/js/client.js", **kwargs
) -> dict[str, object]:
@@ -60,16 +72,7 @@ def get_crash_event_with_frames(frames: Sequence[Mapping[str, str]], **kwargs) -
"platform": "javascript",
"message": "",
"environment": "dev",
- "exception": {
- "values": [
- {
- "type": "Error",
- "value": "Uncaught Thrown Error",
- "stacktrace": {"frames": frames},
- "mechanism": {"type": "onerror", "handled": False},
- }
- ]
- },
+ "exception": {"values": [get_exception(frames)]},
"key_id": "3554525",
"level": "fatal",
"contexts": {
diff --git a/fixtures/search-syntax/explicit_number_tag.json b/fixtures/search-syntax/explicit_number_tag.json
new file mode 100644
index 00000000000000..57337d8d6f7fda
--- /dev/null
+++ b/fixtures/search-syntax/explicit_number_tag.json
@@ -0,0 +1,43 @@
+[
+ {
+ "query": "tags[foo,number]:456 release:1.2.1 tags[project_id,number]:123",
+ "result": [
+ {"type": "spaces", "value": ""},
+ {
+ "type": "filter",
+ "filter": "text",
+ "negated": false,
+ "key": {
+ "type": "keyExplicitNumberTag",
+ "prefix": "tags",
+ "key": {"type": "keySimple", "value": "foo", "quoted": false}
+ },
+ "operator": "",
+ "value": {"type": "valueText", "value": "456", "quoted": false}
+ },
+ {"type": "spaces", "value": " "},
+ {
+ "type": "filter",
+ "filter": "text",
+ "negated": false,
+ "key": {"type": "keySimple", "value": "release", "quoted": false},
+ "operator": "",
+ "value": {"type": "valueText", "value": "1.2.1", "quoted": false}
+ },
+ {"type": "spaces", "value": " "},
+ {
+ "type": "filter",
+ "filter": "text",
+ "negated": false,
+ "key": {
+ "type": "keyExplicitNumberTag",
+ "prefix": "tags",
+ "key": {"type": "keySimple", "value": "project_id", "quoted": false}
+ },
+ "operator": "",
+ "value": {"type": "valueText", "value": "123", "quoted": false}
+ },
+ {"type": "spaces", "value": ""}
+ ]
+ }
+]
diff --git a/fixtures/search-syntax/explicit_number_tags_in_filter.json b/fixtures/search-syntax/explicit_number_tags_in_filter.json
new file mode 100644
index 00000000000000..2e7cd62cc6321d
--- /dev/null
+++ b/fixtures/search-syntax/explicit_number_tags_in_filter.json
@@ -0,0 +1,34 @@
+[
+ {
+ "query": "tags[foo,number]:[123, 456]",
+ "result": [
+ {"type": "spaces", "value": ""},
+ {
+ "type": "filter",
+ "filter": "textIn",
+ "negated": false,
+ "key": {
+ "type": "keyExplicitNumberTag",
+ "prefix": "tags",
+ "key": {"type": "keySimple", "value": "foo", "quoted": false}
+ },
+ "operator": "",
+ "value": {
+ "type": "valueTextList",
+ "items": [
+ {
+ "separator": "",
+ "value": {"type": "valueText", "value": "123", "quoted": false}
+ },
+ {
+ "separator": ", ",
+ "value": {"type": "valueText", "value": "456", "quoted": false}
+ }
+ ]
+ }
+ },
+ {"type": "spaces", "value": ""}
+ ]
+ }
+]
+
diff --git a/fixtures/search-syntax/explicit_string_tag.json b/fixtures/search-syntax/explicit_string_tag.json
new file mode 100644
index 00000000000000..8dd326ff2ed9fe
--- /dev/null
+++ b/fixtures/search-syntax/explicit_string_tag.json
@@ -0,0 +1,43 @@
+[
+ {
+ "query": "tags[fruit,string]:apple release:1.2.1 tags[project_id,string]:123",
+ "result": [
+ {"type": "spaces", "value": ""},
+ {
+ "type": "filter",
+ "filter": "text",
+ "negated": false,
+ "key": {
+ "type": "keyExplicitStringTag",
+ "prefix": "tags",
+ "key": {"type": "keySimple", "value": "fruit", "quoted": false}
+ },
+ "operator": "",
+ "value": {"type": "valueText", "value": "apple", "quoted": false}
+ },
+ {"type": "spaces", "value": " "},
+ {
+ "type": "filter",
+ "filter": "text",
+ "negated": false,
+ "key": {"type": "keySimple", "value": "release", "quoted": false},
+ "operator": "",
+ "value": {"type": "valueText", "value": "1.2.1", "quoted": false}
+ },
+ {"type": "spaces", "value": " "},
+ {
+ "type": "filter",
+ "filter": "text",
+ "negated": false,
+ "key": {
+ "type": "keyExplicitStringTag",
+ "prefix": "tags",
+ "key": {"type": "keySimple", "value": "project_id", "quoted": false}
+ },
+ "operator": "",
+ "value": {"type": "valueText", "value": "123", "quoted": false}
+ },
+ {"type": "spaces", "value": ""}
+ ]
+ }
+]
diff --git a/fixtures/search-syntax/explicit_string_tags_in_filter.json b/fixtures/search-syntax/explicit_string_tags_in_filter.json
new file mode 100644
index 00000000000000..ebf6b7038d9da8
--- /dev/null
+++ b/fixtures/search-syntax/explicit_string_tags_in_filter.json
@@ -0,0 +1,65 @@
+[
+ {
+ "query": "tags[fruit,string]:[apple, pear]",
+ "result": [
+ {"type": "spaces", "value": ""},
+ {
+ "type": "filter",
+ "filter": "textIn",
+ "negated": false,
+ "key": {
+ "type": "keyExplicitStringTag",
+ "prefix": "tags",
+ "key": {"type": "keySimple", "value": "fruit", "quoted": false}
+ },
+ "operator": "",
+ "value": {
+ "type": "valueTextList",
+ "items": [
+ {
+ "separator": "",
+ "value": {"type": "valueText", "value": "apple", "quoted": false}
+ },
+ {
+ "separator": ", ",
+ "value": {"type": "valueText", "value": "pear", "quoted": false}
+ }
+ ]
+ }
+ },
+ {"type": "spaces", "value": ""}
+ ]
+ },
+ {
+ "query": "tags[fruit,string]:[\"apple wow\", \"pear\"]",
+ "result": [
+ {"type": "spaces", "value": ""},
+ {
+ "type": "filter",
+ "filter": "textIn",
+ "negated": false,
+ "key": {
+ "type": "keyExplicitStringTag",
+ "prefix": "tags",
+ "key": {"type": "keySimple", "value": "fruit", "quoted": false}
+ },
+ "operator": "",
+ "value": {
+ "type": "valueTextList",
+ "items": [
+ {
+ "separator": "",
+ "value": {"type": "valueText", "value": "apple wow", "quoted": true}
+ },
+ {
+ "separator": ", ",
+ "value": {"type": "valueText", "value": "pear", "quoted": true}
+ }
+ ]
+ }
+ },
+ {"type": "spaces", "value": ""}
+ ]
+ }
+]
+
diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt
index ef62e79b102a35..8110b79486af6a 100644
--- a/migrations_lockfile.txt
+++ b/migrations_lockfile.txt
@@ -15,10 +15,12 @@ remote_subscriptions: 0003_drop_remote_subscription
replays: 0004_index_together
-sentry: 0802_remove_grouping_auto_update_option
+sentry: 0804_delete_metrics_key_indexer_pt2
social_auth: 0002_default_auto_field
-uptime: 0018_add_trace_sampling_field_to_uptime
+tempest: 0001_create_tempest_credentials_model
-workflow_engine: 0015_create_rule_lookup_tables
+uptime: 0021_drop_region_table_col
+
+workflow_engine: 0019_drop_dataconditions_condition
diff --git a/package.json b/package.json
index 0a27d0d93efa85..d630a938fb33f8 100644
--- a/package.json
+++ b/package.json
@@ -56,13 +56,13 @@
"@sentry-internal/rrweb": "2.26.0",
"@sentry-internal/rrweb-player": "2.26.0",
"@sentry-internal/rrweb-snapshot": "2.26.0",
- "@sentry/core": "8.39.0-beta.0",
- "@sentry/node": "8.39.0-beta.0",
- "@sentry/react": "8.39.0-beta.0",
+ "@sentry/core": "8.43.0",
+ "@sentry/node": "8.43.0",
+ "@sentry/react": "8.43.0",
"@sentry/release-parser": "^1.3.1",
"@sentry/status-page-list": "^0.3.0",
- "@sentry/types": "8.39.0-beta.0",
- "@sentry/utils": "8.39.0-beta.0",
+ "@sentry/types": "8.43.0",
+ "@sentry/utils": "8.43.0",
"@sentry/webpack-plugin": "^2.22.4",
"@spotlightjs/spotlight": "^2.0.0-alpha.1",
"@tanstack/react-query": "^5.56.2",
@@ -177,32 +177,34 @@
"@biomejs/biome": "^1.9.1",
"@codecov/webpack-plugin": "^1.2.0",
"@emotion/eslint-plugin": "^11.12.0",
+ "@eslint/compat": "^1.2.4",
+ "@eslint/eslintrc": "^3.2.0",
+ "@eslint/js": "^9.17.0",
"@pmmmwh/react-refresh-webpack-plugin": "0.5.15",
"@sentry/jest-environment": "6.0.0",
- "@sentry/profiling-node": "8.39.0-beta.0",
+ "@sentry/profiling-node": "8.43.0",
"@styled/typescript-styled-plugin": "^1.0.1",
"@testing-library/dom": "10.1.0",
"@testing-library/jest-dom": "6.4.5",
"@testing-library/react": "16.0.0",
"@testing-library/user-event": "14.5.2",
"@types/node": "^22.9.1",
- "@typescript-eslint/eslint-plugin": "^8.8.1",
- "@typescript-eslint/parser": "^8.8.1",
"babel-gettext-extractor": "^4.1.3",
"babel-jest": "29.7.0",
"benchmark": "^2.1.4",
- "eslint": "8.57.1",
- "eslint-import-resolver-typescript": "^3.6.3",
+ "eslint": "^9.17.0",
+ "eslint-config-prettier": "^9.1.0",
+ "eslint-import-resolver-typescript": "^3.7.0",
"eslint-plugin-import": "^2.31.0",
- "eslint-plugin-jest": "^28.8.3",
- "eslint-plugin-jest-dom": "^5.4.0",
- "eslint-plugin-no-lookahead-lookbehind-regexp": "0.1.0",
- "eslint-plugin-react": "^7.37.1",
- "eslint-plugin-react-hooks": "^4.6.2",
+ "eslint-plugin-jest": "^28.10.0",
+ "eslint-plugin-jest-dom": "^5.5.0",
+ "eslint-plugin-react": "^7.37.3",
+ "eslint-plugin-react-hooks": "5.0.0",
"eslint-plugin-sentry": "^2.10.0",
"eslint-plugin-simple-import-sort": "^12.1.1",
- "eslint-plugin-testing-library": "^6.3.0",
+ "eslint-plugin-testing-library": "^7.1.1",
"eslint-plugin-typescript-sort-keys": "^3.3.0",
+ "globals": "^15.14.0",
"html-webpack-plugin": "^5.6.0",
"jest": "29.7.0",
"jest-canvas-mock": "^2.5.2",
@@ -215,6 +217,7 @@
"stylelint-config-recommended": "^14.0.1",
"terser": "5.31.6",
"tsconfig-paths": "^4.2.0",
+ "typescript-eslint": "^8.18.2",
"webpack-dev-server": "5.1.0"
},
"resolutions": {
@@ -232,12 +235,12 @@
"test-precommit": "node scripts/test.js --bail --findRelatedTests -u",
"test-staged": "node scripts/test.js --findRelatedTests $(git diff --name-only --cached)",
"lint": "yarn lint:biome && yarn lint:prettier && yarn lint:js && yarn lint:css",
- "lint:js": "eslint . --ext .js,.ts,.tsx",
+ "lint:js": "eslint",
"lint:css": "stylelint '**/*.[jt]sx'",
"lint:biome": "biome check .",
"lint:prettier": "prettier \"**/*.md\" \"**/*.yaml\" \"**/*.[jt]s(x)?\" --check --log-level=error",
"fix": "yarn fix:biome && yarn fix:prettier && yarn fix:eslint",
- "fix:eslint": "eslint . --ext .js,.ts,.tsx --fix",
+ "fix:eslint": "eslint --fix",
"fix:biome": "biome check . --write",
"fix:prettier": "prettier \"**/*.md\" \"**/*.yaml\" \"**/*.[jt]s(x)?\" --write --log-level=error",
"dev": "(yarn check --verify-tree || yarn install --check-files) && sentry devserver",
diff --git a/pyproject.toml b/pyproject.toml
index 83c9759ec9c211..0a21b614af9296 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -120,7 +120,6 @@ ignore_missing_imports = true
module = [
"sentry.api.base",
"sentry.api.bases.organization_events",
- "sentry.api.bases.organizationmember",
"sentry.api.bases.team",
"sentry.api.endpoints.accept_organization_invite",
"sentry.api.endpoints.auth_config",
@@ -132,7 +131,6 @@ module = [
"sentry.api.endpoints.index",
"sentry.api.endpoints.internal.mail",
"sentry.api.endpoints.organization_details",
- "sentry.api.endpoints.organization_events",
"sentry.api.endpoints.organization_events_facets_performance",
"sentry.api.endpoints.organization_events_meta",
"sentry.api.endpoints.organization_events_spans_performance",
@@ -146,18 +144,14 @@ module = [
"sentry.api.endpoints.organization_releases",
"sentry.api.endpoints.organization_request_project_creation",
"sentry.api.endpoints.organization_search_details",
- "sentry.api.endpoints.organization_sessions",
"sentry.api.endpoints.organization_stats",
"sentry.api.endpoints.project_index",
"sentry.api.endpoints.project_ownership",
"sentry.api.endpoints.project_release_files",
"sentry.api.endpoints.project_repo_path_parsing",
- "sentry.api.endpoints.project_rule_preview",
"sentry.api.endpoints.project_rules_configuration",
- "sentry.api.endpoints.project_servicehook_stats",
"sentry.api.endpoints.project_transaction_names",
"sentry.api.endpoints.team_details",
- "sentry.api.endpoints.team_release_count",
"sentry.api.endpoints.user_subscriptions",
"sentry.api.event_search",
"sentry.api.helpers.group_index.index",
@@ -167,7 +161,6 @@ module = [
"sentry.api.permissions",
"sentry.api.serializers.models.auth_provider",
"sentry.api.serializers.models.dashboard",
- "sentry.api.serializers.models.environment",
"sentry.api.serializers.models.event",
"sentry.api.serializers.models.group",
"sentry.api.serializers.models.group_stream",
@@ -176,7 +169,6 @@ module = [
"sentry.api.serializers.models.project",
"sentry.api.serializers.models.role",
"sentry.api.serializers.models.rule",
- "sentry.api.serializers.models.team",
"sentry.api.serializers.rest_framework.mentions",
"sentry.auth.helper",
"sentry.auth.provider",
@@ -214,7 +206,6 @@ module = [
"sentry.integrations.gitlab.client",
"sentry.integrations.gitlab.integration",
"sentry.integrations.gitlab.issues",
- "sentry.integrations.jira.actions.form",
"sentry.integrations.jira.client",
"sentry.integrations.jira.integration",
"sentry.integrations.jira.views.base",
@@ -285,10 +276,7 @@ module = [
"sentry.scim.endpoints.utils",
"sentry.search.events.builder.errors",
"sentry.search.events.builder.metrics",
- "sentry.search.events.datasets.discover",
"sentry.search.events.datasets.filter_aliases",
- "sentry.search.events.datasets.function_aliases",
- "sentry.search.events.datasets.metrics",
"sentry.search.events.datasets.metrics_layer",
"sentry.search.events.fields",
"sentry.search.events.filter",
@@ -302,8 +290,6 @@ module = [
"sentry.snuba.metrics.datasource",
"sentry.snuba.metrics.fields.base",
"sentry.snuba.metrics.query_builder",
- "sentry.snuba.sessions_v2",
- "sentry.snuba.spans_indexed",
"sentry.snuba.spans_metrics",
"sentry.tagstore.snuba.backend",
"sentry.tagstore.types",
@@ -324,17 +310,11 @@ module = [
"sentry_plugins.bitbucket.mixins",
"sentry_plugins.github.plugin",
"sentry_plugins.jira.plugin",
- "tests.sentry.api.bases.test_organization",
- "tests.sentry.api.bases.test_project",
- "tests.sentry.api.bases.test_team",
"tests.sentry.api.endpoints.notifications.test_notification_actions_details",
"tests.sentry.api.endpoints.notifications.test_notification_actions_index",
- "tests.sentry.api.endpoints.test_event_attachment_details",
"tests.sentry.api.helpers.test_group_index",
- "tests.sentry.api.test_authentication",
"tests.sentry.api.test_base",
"tests.sentry.api.test_event_search",
- "tests.sentry.eventstore.test_base",
"tests.sentry.issues.test_utils",
]
disable_error_code = [
@@ -360,13 +340,22 @@ disable_error_code = [
# begin: stronger typing
[[tool.mypy.overrides]]
module = [
+ "fixtures.safe_migrations_apps.*",
+ "sentry.analytics.*",
+ "sentry.api.endpoints.integrations.sentry_apps.installation.external_issue.*",
"sentry.api.endpoints.project_backfill_similar_issues_embeddings_records",
+ "sentry.api.endpoints.release_thresholds.health_checks.*",
+ "sentry.api.endpoints.relocations.artifacts.*",
"sentry.api.helpers.deprecation",
"sentry.api.helpers.source_map_helper",
+ "sentry.api.serializers.models.organization_member.*",
+ "sentry.audit_log.services.*",
"sentry.auth.services.*",
"sentry.auth.view",
"sentry.buffer.*",
"sentry.build.*",
+ "sentry.data_secrecy.models.*",
+ "sentry.data_secrecy.service.*",
"sentry.db.models.fields.citext",
"sentry.db.models.fields.foreignkey",
"sentry.db.models.fields.hybrid_cloud_foreign_key",
@@ -375,31 +364,82 @@ module = [
"sentry.db.models.paranoia",
"sentry.db.models.utils",
"sentry.deletions.*",
+ "sentry.digests.*",
"sentry.digests.notifications",
+ "sentry.dynamic_sampling.models.*",
+ "sentry.dynamic_sampling.rules.biases.*",
+ "sentry.dynamic_sampling.rules.combinators.*",
+ "sentry.dynamic_sampling.rules.helpers.*",
+ "sentry.dynamic_sampling.tasks.helpers.*",
+ "sentry.eventstore.reprocessing.*",
"sentry.eventstore.reprocessing.redis",
+ "sentry.eventstream.*",
"sentry.eventtypes.error",
+ "sentry.feedback.migrations.*",
+ "sentry.flags.migrations.*",
+ "sentry.grouping.api",
"sentry.grouping.component",
"sentry.grouping.fingerprinting",
+ "sentry.grouping.fingerprinting.*",
+ "sentry.grouping.grouping_info",
"sentry.grouping.ingest.*",
"sentry.grouping.parameterization",
+ "sentry.grouping.utils",
+ "sentry.grouping.variants",
"sentry.hybridcloud.*",
+ "sentry.identity.discord.*",
"sentry.identity.github_enterprise.*",
+ "sentry.identity.services.*",
+ "sentry.identity.vsts_extension.*",
+ "sentry.incidents.utils.*",
"sentry.ingest.slicing",
+ "sentry.integrations.discord.actions.*",
+ "sentry.integrations.discord.message_builder.base.component.*",
+ "sentry.integrations.discord.message_builder.base.embed.*",
+ "sentry.integrations.discord.utils.*",
+ "sentry.integrations.discord.views.*",
+ "sentry.integrations.discord.webhooks.*",
+ "sentry.integrations.github.actions.*",
+ "sentry.integrations.github_enterprise.actions.*",
+ "sentry.integrations.jira.actions.*",
+ "sentry.integrations.jira.endpoints.*",
+ "sentry.integrations.jira.models.*",
+ "sentry.integrations.jira_server.actions.*",
+ "sentry.integrations.jira_server.utils.*",
"sentry.integrations.models.integration_feature",
+ "sentry.integrations.project_management.*",
+ "sentry.integrations.repository.*",
+ "sentry.integrations.services.*",
+ "sentry.integrations.slack.threads.*",
+ "sentry.integrations.slack.views.*",
+ "sentry.integrations.vsts.actions.*",
+ "sentry.integrations.vsts.tasks.*",
+ "sentry.integrations.web.debug.*",
"sentry.issues",
"sentry.issues.analytics",
"sentry.issues.apps",
"sentry.issues.constants",
"sentry.issues.endpoints",
+ "sentry.issues.endpoints.actionable_items",
"sentry.issues.endpoints.group_activities",
"sentry.issues.endpoints.group_event_details",
"sentry.issues.endpoints.group_events",
- "sentry.issues.endpoints.group_participants",
+ "sentry.issues.endpoints.group_notes",
+ "sentry.issues.endpoints.group_notes_details",
+ "sentry.issues.endpoints.group_similar_issues_embeddings",
+ "sentry.issues.endpoints.group_tombstone",
+ "sentry.issues.endpoints.group_tombstone_details",
+ "sentry.issues.endpoints.organization_eventid",
+ "sentry.issues.endpoints.organization_group_index",
"sentry.issues.endpoints.organization_group_index_stats",
"sentry.issues.endpoints.organization_group_search_views",
"sentry.issues.endpoints.organization_release_previous_commits",
"sentry.issues.endpoints.organization_searches",
+ "sentry.issues.endpoints.organization_shortid",
+ "sentry.issues.endpoints.project_event_details",
"sentry.issues.endpoints.project_events",
+ "sentry.issues.endpoints.project_group_index",
+ "sentry.issues.endpoints.project_group_stats",
"sentry.issues.endpoints.project_stacktrace_link",
"sentry.issues.endpoints.related_issues",
"sentry.issues.endpoints.shared_group_details",
@@ -420,6 +460,7 @@ module = [
"sentry.issues.receivers",
"sentry.issues.related.*",
"sentry.issues.run",
+ "sentry.issues.services.*",
"sentry.issues.status_change",
"sentry.issues.status_change_consumer",
"sentry.issues.status_change_message",
@@ -427,39 +468,78 @@ module = [
"sentry.lang.java.processing",
"sentry.llm.*",
"sentry.migrations.*",
+ "sentry.models.activity",
"sentry.models.event",
"sentry.models.eventattachment",
+ "sentry.models.groupassignee",
+ "sentry.models.grouphistory",
"sentry.models.groupsubscription",
- "sentry.monkey",
+ "sentry.models.options.*",
+ "sentry.monkey.*",
+ "sentry.nodestore.*",
"sentry.nodestore.base",
"sentry.nodestore.bigtable.backend",
"sentry.nodestore.django.backend",
"sentry.nodestore.django.models",
"sentry.nodestore.filesystem.backend",
"sentry.nodestore.models",
+ "sentry.notifications.services.*",
"sentry.organizations.*",
"sentry.ownership.*",
"sentry.plugins.base.response",
"sentry.plugins.base.view",
+ "sentry.plugins.validators.*",
+ "sentry.post_process_forwarder.*",
"sentry.profiles.*",
- "sentry.projects.services.*",
+ "sentry.projects.*",
+ "sentry.queue.*",
"sentry.ratelimits.leaky_bucket",
"sentry.relay.config.metric_extraction",
+ "sentry.relay.types.*",
+ "sentry.release_health.release_monitor.*",
+ "sentry.relocation.services.relocation_export.*",
+ "sentry.remote_subscriptions.migrations.*",
+ "sentry.replays.consumers.*",
+ "sentry.replays.lib.new_query.*",
+ "sentry.replays.migrations.*",
"sentry.reprocessing2",
+ "sentry.roles.*",
+ "sentry.rules.actions.sentry_apps.*",
+ "sentry.rules.conditions.*",
+ "sentry.rules.history.endpoints.*",
"sentry.runner.*",
"sentry.search.snuba.backend",
- "sentry.sentry_metrics.consumers.indexer.slicing_router",
+ "sentry.security.*",
+ "sentry.seer.similarity.*",
+ "sentry.sentry_apps.external_issues.*",
+ "sentry.sentry_apps.services.*",
+ "sentry.sentry_apps.utils.*",
+ "sentry.sentry_apps.web.*",
+ "sentry.sentry_metrics.consumers.indexer.*",
+ "sentry.sentry_metrics.indexer.limiters.*",
+ "sentry.shared_integrations.exceptions.*",
+ "sentry.slug.*",
"sentry.snuba.metrics.extraction",
+ "sentry.snuba.metrics.naming_layer.*",
+ "sentry.snuba.query_subscriptions.*",
+ "sentry.spans.grouping.*",
"sentry.stacktraces.platform",
"sentry.tasks.beacon",
"sentry.tasks.commit_context",
+ "sentry.tasks.embeddings_grouping.backfill_seer_grouping_records_for_project",
"sentry.tasks.on_demand_metrics",
"sentry.tasks.reprocessing2",
"sentry.tasks.store",
"sentry.taskworker.*",
+ "sentry.tempest.endpoints.*",
+ "sentry.tempest.migrations.*",
"sentry.testutils.helpers.task_runner",
- "sentry.types.actor",
- "sentry.types.region",
+ "sentry.testutils.skips",
+ "sentry.toolbar.utils.*",
+ "sentry.trash.*",
+ "sentry.types.*",
+ "sentry.uptime.migrations.*",
+ "sentry.usage_accountant.*",
"sentry.users.*",
"sentry.utils.arroyo",
"sentry.utils.assets",
@@ -476,6 +556,7 @@ module = [
"sentry.utils.imports",
"sentry.utils.iterators",
"sentry.utils.javascript",
+ "sentry.utils.kvstore.*",
"sentry.utils.lazy_service_wrapper",
"sentry.utils.locking.*",
"sentry.utils.migrations",
@@ -486,6 +567,7 @@ module = [
"sentry.utils.pubsub",
"sentry.utils.redis",
"sentry.utils.redis_metrics",
+ "sentry.utils.sdk_crashes.*",
"sentry.utils.sentry_apps.*",
"sentry.utils.services",
"sentry.utils.sms",
@@ -496,20 +578,46 @@ module = [
"sentry.web.frontend.auth_provider_login",
"sentry.web.frontend.cli",
"sentry.web.frontend.csv",
+ "sentry.web.frontend.mixins.*",
+ "sentry.workflow_engine.handlers.action.*",
+ "sentry.workflow_engine.handlers.condition.*",
+ "sentry.workflow_engine.migrations.*",
"sentry_plugins.base",
+ "social_auth.migrations.*",
+ "sudo.*",
+ "tests.sentry.audit_log.services.*",
"tests.sentry.deletions.test_group",
"tests.sentry.event_manager.test_event_manager",
"tests.sentry.grouping.ingest.test_seer",
"tests.sentry.grouping.test_fingerprinting",
"tests.sentry.hybridcloud.*",
+ "tests.sentry.incidents.serializers.*",
+ "tests.sentry.integrations.msteams.webhook.*",
+ "tests.sentry.integrations.repository.base.*",
+ "tests.sentry.integrations.repository.issue_alert.*",
+ "tests.sentry.integrations.slack.threads.*",
"tests.sentry.issues",
"tests.sentry.issues.endpoints",
"tests.sentry.issues.endpoints.test_actionable_items",
+ "tests.sentry.issues.endpoints.test_group_activities",
+ "tests.sentry.issues.endpoints.test_group_details",
+ "tests.sentry.issues.endpoints.test_group_event_details",
+ "tests.sentry.issues.endpoints.test_group_events",
+ "tests.sentry.issues.endpoints.test_group_hashes",
+ "tests.sentry.issues.endpoints.test_group_notes",
+ "tests.sentry.issues.endpoints.test_group_notes_details",
+ "tests.sentry.issues.endpoints.test_group_similar_issues_embeddings",
+ "tests.sentry.issues.endpoints.test_group_tombstone",
+ "tests.sentry.issues.endpoints.test_group_tombstone_details",
"tests.sentry.issues.endpoints.test_organization_group_search_views",
"tests.sentry.issues.endpoints.test_organization_searches",
+ "tests.sentry.issues.endpoints.test_organization_shortid",
+ "tests.sentry.issues.endpoints.test_project_group_stats",
"tests.sentry.issues.endpoints.test_project_stacktrace_link",
"tests.sentry.issues.endpoints.test_related_issues",
+ "tests.sentry.issues.endpoints.test_shared_group_details",
"tests.sentry.issues.endpoints.test_source_map_debug",
+ "tests.sentry.issues.endpoints.test_team_groups_old",
"tests.sentry.issues.test_attributes",
"tests.sentry.issues.test_escalating",
"tests.sentry.issues.test_escalating_issues_alg",
@@ -530,12 +638,25 @@ module = [
"tests.sentry.issues.test_status_change",
"tests.sentry.issues.test_status_change_consumer",
"tests.sentry.issues.test_update_inbox",
+ "tests.sentry.organizations.*",
"tests.sentry.ownership.*",
+ "tests.sentry.post_process_forwarder.*",
+ "tests.sentry.profiling.*",
+ "tests.sentry.queue.*",
"tests.sentry.ratelimits.test_leaky_bucket",
"tests.sentry.relay.config.test_metric_extraction",
+ "tests.sentry.replays.unit.lib.*",
+ "tests.sentry.rules.actions.base.*",
+ "tests.sentry.security.*",
+ "tests.sentry.snuba.metrics.test_metrics_query_layer.*",
+ "tests.sentry.tasks.integrations.*",
"tests.sentry.tasks.test_on_demand_metrics",
+ "tests.sentry.types.*",
"tests.sentry.types.test_actor",
"tests.sentry.types.test_region",
+ "tests.sentry.usage_accountant.*",
+ "tests.sentry.users.services.*",
+ "tests.sentry.utils.mockdata.*",
"tests.sentry.web.frontend.test_cli",
"tools.*",
]
diff --git a/requirements-base.txt b/requirements-base.txt
index 4c9b1123811dd5..7b4659d0bbf825 100644
--- a/requirements-base.txt
+++ b/requirements-base.txt
@@ -41,7 +41,7 @@ packaging>=24.1
parsimonious>=0.10.0
petname>=2.6
phonenumberslite>=8.12.32
-Pillow>=10.4.0
+Pillow>=11.0.0
progressbar2>=3.41.0
protobuf>=5.27.3
proto-plus>=1.25.0
@@ -65,12 +65,12 @@ requests>=2.32.3
rfc3339-validator>=0.1.2
rfc3986-validator>=0.1.1
# [end] jsonschema format validators
-sentry-arroyo>=2.18.2
-sentry-kafka-schemas>=0.1.122
+sentry-arroyo>=2.19.9
+sentry-kafka-schemas>=0.1.125
sentry-ophio==1.0.0
sentry-protos>=0.1.37
sentry-redis-tools>=0.1.7
-sentry-relay>=0.9.3
+sentry-relay>=0.9.4
sentry-sdk[http2]>=2.19.2
slack-sdk>=3.27.2
snuba-sdk>=3.0.43
diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt
index e56d30cc4a488d..b562c3cee84851 100644
--- a/requirements-dev-frozen.txt
+++ b/requirements-dev-frozen.txt
@@ -23,6 +23,7 @@ celery==5.3.5
certifi==2024.7.4
cffi==1.17.1
cfgv==3.3.1
+chardet==5.2.0
charset-normalizer==3.4.0
click==8.1.7
click-didyoumean==0.3.0
@@ -36,10 +37,10 @@ cryptography==43.0.1
cssselect==1.0.3
cssutils==2.9.0
datadog==0.49.1
-devservices==1.0.5
+devservices==1.0.8
distlib==0.3.8
distro==1.8.0
-django==5.1.1
+django==5.1.4
django-crispy-forms==1.14.0
django-csp==3.8
django-pg-zero-downtime-migrations==0.16
@@ -105,7 +106,7 @@ mmh3==4.0.0
more-itertools==8.13.0
msgpack==1.1.0
msgpack-types==0.2.0
-mypy==1.13.0
+mypy==1.14.0
mypy-extensions==1.0.0
nodeenv==1.9.1
oauthlib==3.1.0
@@ -125,7 +126,7 @@ pep517==0.12.0
petname==2.6
phabricator==0.7.0
phonenumberslite==8.12.55
-pillow==10.4.0
+pillow==11.0.0
pip-tools==7.1.0
platformdirs==4.2.0
pluggy==1.5.0
@@ -169,7 +170,7 @@ redis==3.4.1
redis-py-cluster==2.1.0
referencing==0.30.2
regex==2022.9.13
-reportlab==4.0.7
+reportlab==4.2.5
requests==2.32.3
requests-file==2.1.0
requests-oauthlib==1.2.0
@@ -180,17 +181,17 @@ rpds-py==0.20.0
rsa==4.8
s3transfer==0.10.0
selenium==4.16.0
-sentry-arroyo==2.18.2
+sentry-arroyo==2.19.9
sentry-cli==2.16.0
sentry-covdefaults-disable-branch-coverage==1.0.2
sentry-devenv==1.14.2
sentry-forked-django-stubs==5.1.1.post1
-sentry-forked-djangorestframework-stubs==3.15.1.post2
-sentry-kafka-schemas==0.1.122
+sentry-forked-djangorestframework-stubs==3.15.2.post1
+sentry-kafka-schemas==0.1.125
sentry-ophio==1.0.0
-sentry-protos==0.1.37
+sentry-protos==0.1.39
sentry-redis-tools==0.1.7
-sentry-relay==0.9.3
+sentry-relay==0.9.4
sentry-sdk==2.19.2
sentry-usage-accountant==0.0.10
simplejson==3.17.6
@@ -241,7 +242,7 @@ virtualenv==20.25.0
wcwidth==0.2.10
werkzeug==3.0.6
wheel==0.38.4
-wrapt==1.17.0rc1
+wrapt==1.17.0
wsproto==1.1.0
xmlsec==1.3.14
zstandard==0.18.0
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 9dd1a81506ba10..0748993df7b1a1 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,7 +1,7 @@
--index-url https://pypi.devinfra.sentry.io/simple
sentry-devenv>=1.14.2
-devservices>=1.0.5
+devservices>=1.0.8
covdefaults>=2.3.0
sentry-covdefaults-disable-branch-coverage>=1.0.2
@@ -38,10 +38,10 @@ packaging>=21.3
# for type checking
sentry-forked-django-stubs>=5.1.1.post1
-sentry-forked-djangorestframework-stubs>=3.15.1.post2
+sentry-forked-djangorestframework-stubs>=3.15.2.post1
lxml-stubs
msgpack-types>=0.2.0
-mypy>=1.13
+mypy>=1.14
types-beautifulsoup4
types-cachetools
types-jsonschema
diff --git a/requirements-frozen.txt b/requirements-frozen.txt
index 40b270e613db9c..79ed8385b6c540 100644
--- a/requirements-frozen.txt
+++ b/requirements-frozen.txt
@@ -20,6 +20,7 @@ cachetools==5.3.0
celery==5.3.5
certifi==2024.7.4
cffi==1.17.1
+chardet==5.2.0
charset-normalizer==3.4.0
click==8.1.7
click-didyoumean==0.3.0
@@ -32,7 +33,7 @@ cssselect==1.0.3
cssutils==2.9.0
datadog==0.49.1
distro==1.8.0
-django==5.1.1
+django==5.1.4
django-crispy-forms==1.14.0
django-csp==3.8
django-pg-zero-downtime-migrations==0.16
@@ -88,7 +89,7 @@ parsimonious==0.10.0
petname==2.6
phabricator==0.7.0
phonenumberslite==8.12.55
-pillow==10.4.0
+pillow==11.0.0
progressbar2==3.41.0
prompt-toolkit==3.0.41
proto-plus==1.25.0
@@ -115,7 +116,7 @@ redis==3.4.1
redis-py-cluster==2.1.0
referencing==0.30.2
regex==2022.9.13
-reportlab==4.0.7
+reportlab==4.2.5
requests==2.32.3
requests-file==2.1.0
requests-oauthlib==1.2.0
@@ -124,12 +125,12 @@ rfc3986-validator==0.1.1
rpds-py==0.20.0
rsa==4.8
s3transfer==0.10.0
-sentry-arroyo==2.18.2
-sentry-kafka-schemas==0.1.122
+sentry-arroyo==2.19.9
+sentry-kafka-schemas==0.1.125
sentry-ophio==1.0.0
-sentry-protos==0.1.37
+sentry-protos==0.1.39
sentry-redis-tools==0.1.7
-sentry-relay==0.9.3
+sentry-relay==0.9.4
sentry-sdk==2.19.2
sentry-usage-accountant==0.0.10
simplejson==3.17.6
diff --git a/requirements-getsentry.txt b/requirements-getsentry.txt
index 2dbf4c23d860b8..21cff131171f2d 100644
--- a/requirements-getsentry.txt
+++ b/requirements-getsentry.txt
@@ -10,5 +10,5 @@ Avalara==20.9.0
iso3166
pycountry==17.5.14
pyvat==1.3.15
-reportlab==4.0.7
+reportlab==4.2.5
stripe==3.1.0
diff --git a/setup.cfg b/setup.cfg
index aae014f9aa7768..c7b83b9787a533 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
[metadata]
name = sentry
-version = 24.12.0.dev0
+version = 25.1.0.dev0
description = A realtime logging and aggregation server.
long_description = file: README.md
long_description_content_type = text/markdown
diff --git a/src/sentry/analytics/events/groupowner_assignment.py b/src/sentry/analytics/events/groupowner_assignment.py
index 30d2913bed831e..81926fecc2bd98 100644
--- a/src/sentry/analytics/events/groupowner_assignment.py
+++ b/src/sentry/analytics/events/groupowner_assignment.py
@@ -9,6 +9,9 @@ class GroupOwnerAssignment(analytics.Event):
analytics.Attribute("project_id"),
analytics.Attribute("group_id"),
analytics.Attribute("new_assignment", type=bool),
+ analytics.Attribute("user_id", required=False),
+ analytics.Attribute("group_owner_type"),
+ analytics.Attribute("method", required=False),
)
diff --git a/src/sentry/api/api_owners.py b/src/sentry/api/api_owners.py
index 60e5d1884f26a3..efb352fe7185ce 100644
--- a/src/sentry/api/api_owners.py
+++ b/src/sentry/api/api_owners.py
@@ -28,3 +28,4 @@ class ApiOwner(Enum):
TELEMETRY_EXPERIENCE = "telemetry-experience"
UNOWNED = "unowned"
WEB_FRONTEND_SDKS = "team-web-sdk-frontend"
+ GDX = "gdx"
diff --git a/src/sentry/api/authentication.py b/src/sentry/api/authentication.py
index eb0fd888294f29..01a76662f5393a 100644
--- a/src/sentry/api/authentication.py
+++ b/src/sentry/api/authentication.py
@@ -293,11 +293,11 @@ class ClientIdSecretAuthentication(QuietBasicAuthentication):
"""
def authenticate(self, request: Request):
- if not request.json_body:
+ if not request.data:
raise AuthenticationFailed("Invalid request")
- client_id = request.json_body.get("client_id")
- client_secret = request.json_body.get("client_secret")
+ client_id = request.data.get("client_id")
+ client_secret = request.data.get("client_secret")
invalid_pair_error = AuthenticationFailed("Invalid Client ID / Secret pair")
diff --git a/src/sentry/api/base.py b/src/sentry/api/base.py
index fddcd7dd555142..680da60495fa82 100644
--- a/src/sentry/api/base.py
+++ b/src/sentry/api/base.py
@@ -9,7 +9,6 @@
from typing import Any
from urllib.parse import quote as urlquote
-import orjson
import sentry_sdk
from django.conf import settings
from django.http import HttpResponse
@@ -342,29 +341,6 @@ def handle_exception_with_details(
def create_audit_entry(self, request: Request, transaction_id=None, **kwargs):
return create_audit_entry(request, transaction_id, audit_logger, **kwargs)
- def load_json_body(self, request: Request):
- """
- Attempts to load the request body when it's JSON.
-
- The end result is ``request.json_body`` having a value. When it can't
- load the body as JSON, for any reason, ``request.json_body`` is None.
-
- The request flow is unaffected and no exceptions are ever raised.
- """
-
- request.json_body = None
-
- if not request.META.get("CONTENT_TYPE", "").startswith("application/json"):
- return
-
- if not len(request.body):
- return
-
- try:
- request.json_body = orjson.loads(request.body)
- except orjson.JSONDecodeError:
- return
-
def initialize_request(self, request: HttpRequest, *args: Any, **kwargs: Any) -> Request:
# XXX: Since DRF 3.x, when the request is passed into
# `initialize_request` it's set as an internal variable on the returned
@@ -398,7 +374,10 @@ def dispatch(self, request: Request, *args, **kwargs) -> Response:
self.args = args
self.kwargs = kwargs
request = self.initialize_request(request, *args, **kwargs)
- self.load_json_body(request)
+ # XXX: without this seemingly useless access to `.body` we are
+ # unable to access `request.body` later on due to `rest_framework`
+ # loading the request body via `request.read()`
+ request.body
self.request = request
self.headers = self.default_response_headers # deprecate?
diff --git a/src/sentry/api/bases/organization.py b/src/sentry/api/bases/organization.py
index 1866b60c6316f1..4e5d2e632d0abc 100644
--- a/src/sentry/api/bases/organization.py
+++ b/src/sentry/api/bases/organization.py
@@ -230,6 +230,14 @@ class OrganizationMetricsPermission(OrganizationPermission):
}
+class OrganizationFlagWebHookSigningSecretPermission(OrganizationPermission):
+ scope_map = {
+ "GET": ["org:read", "org:write", "org:admin"],
+ "POST": ["org:read", "org:write", "org:admin"],
+ "DELETE": ["org:write", "org:admin"],
+ }
+
+
class ControlSiloOrganizationEndpoint(Endpoint):
"""
A base class for endpoints that use an organization scoping but lives in the control silo
diff --git a/src/sentry/api/bases/organizationmember.py b/src/sentry/api/bases/organizationmember.py
index 4f322056d3a701..c140bb06f73572 100644
--- a/src/sentry/api/bases/organizationmember.py
+++ b/src/sentry/api/bases/organizationmember.py
@@ -1,11 +1,11 @@
from __future__ import annotations
-from typing import Any
+from typing import Any, NotRequired, TypedDict
from rest_framework import serializers
+from rest_framework.fields import empty
from rest_framework.request import Request
-from sentry import features
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.permissions import StaffPermissionMixin
from sentry.db.models.fields.bounded import BoundedAutoField
@@ -43,10 +43,7 @@ def has_object_permission(
is_role_above_member = "member:admin" in scopes or "member:write" in scopes
if isinstance(organization, RpcUserOrganizationContext):
organization = organization.organization
- return is_role_above_member or (
- features.has("organizations:members-invite-teammates", organization)
- and not organization.flags.disable_member_invite
- )
+ return is_role_above_member or not organization.flags.disable_member_invite
class MemberAndStaffPermission(StaffPermissionMixin, MemberPermission):
@@ -65,7 +62,7 @@ def to_internal_value(self, data):
return data
return super().to_internal_value(data)
- def run_validation(self, data):
+ def run_validation(self, data=empty):
if data == "me":
return data
return super().run_validation(data)
@@ -75,6 +72,15 @@ class MemberSerializer(serializers.Serializer):
id = MemberIdField(min_value=0, max_value=BoundedAutoField.MAX_VALUE, required=True)
+class _FilterKwargs(TypedDict):
+ organization: Organization
+ user_id: NotRequired[int]
+ user_is_active: NotRequired[bool]
+ id: NotRequired[int | str]
+ organization_id: NotRequired[int]
+ invite_status: NotRequired[int]
+
+
class OrganizationMemberEndpoint(OrganizationEndpoint):
def convert_args(
self,
@@ -105,16 +111,16 @@ def _get_member(
member_id: int | str,
invite_status: InviteStatus | None = None,
) -> OrganizationMember:
- args = []
- kwargs = dict(organization=organization)
+ kwargs: _FilterKwargs = {"organization": organization}
if member_id == "me":
- kwargs.update(user_id=request.user.id, user_is_active=True)
+ kwargs["user_id"] = request.user.id
+ kwargs["user_is_active"] = True
else:
- kwargs.update(id=member_id, organization_id=organization.id)
+ kwargs["id"] = member_id
+ kwargs["organization_id"] = organization.id
if invite_status:
- kwargs.update(invite_status=invite_status.value)
+ kwargs["invite_status"] = invite_status.value
- om = OrganizationMember.objects.filter(*args, **kwargs).get()
- return om
+ return OrganizationMember.objects.filter(**kwargs).get()
diff --git a/src/sentry/api/decorators.py b/src/sentry/api/decorators.py
index ef28203d678411..c6e265588890d0 100644
--- a/src/sentry/api/decorators.py
+++ b/src/sentry/api/decorators.py
@@ -4,7 +4,11 @@
from rest_framework.request import Request
from rest_framework.response import Response
-from sentry.api.exceptions import EmailVerificationRequired, SudoRequired
+from sentry.api.exceptions import (
+ EmailVerificationRequired,
+ PrimaryEmailVerificationRequired,
+ SudoRequired,
+)
from sentry.models.apikey import is_api_key_auth
from sentry.models.apitoken import is_api_token_auth
from sentry.models.orgauthtoken import is_org_auth_token_auth
@@ -45,3 +49,13 @@ def wrapped(self, request: Request, *args, **kwargs) -> Response:
return func(self, request, *args, **kwargs)
return wrapped
+
+
+def primary_email_verification_required(func):
+ @wraps(func)
+ def wrapped(self, request: Request, *args, **kwargs) -> Response:
+ if isinstance(request.user, AnonymousUser) or not request.user.has_verified_primary_email():
+ raise PrimaryEmailVerificationRequired(request.user)
+ return func(self, request, *args, **kwargs)
+
+ return wrapped
diff --git a/src/sentry/api/endpoints/admin_project_configs.py b/src/sentry/api/endpoints/admin_project_configs.py
index 3b46ff73c17ac0..b8b1b5a8730b87 100644
--- a/src/sentry/api/endpoints/admin_project_configs.py
+++ b/src/sentry/api/endpoints/admin_project_configs.py
@@ -1,3 +1,6 @@
+from collections.abc import MutableMapping
+from typing import Any
+
from django.http import Http404
from rest_framework.request import Request
from rest_framework.response import Response
@@ -7,11 +10,12 @@
from sentry.api.base import Endpoint, region_silo_endpoint
from sentry.api.permissions import SuperuserOrStaffFeatureFlaggedPermission
from sentry.models.project import Project
+from sentry.models.projectkey import ProjectKey
from sentry.relay import projectconfig_cache
+from sentry.relay.config import ProjectConfig, get_project_config
from sentry.tasks.relay import schedule_invalidate_project_config
-# NOTE: This endpoint should be in getsentry
@region_silo_endpoint
class AdminRelayProjectConfigsEndpoint(Endpoint):
owner = ApiOwner.OWNERS_INGEST
@@ -22,45 +26,109 @@ class AdminRelayProjectConfigsEndpoint(Endpoint):
permission_classes = (SuperuserOrStaffFeatureFlaggedPermission,)
def get(self, request: Request) -> Response:
+ """The GET endpoint retrieves the project configs for a specific project_id
+ or a set of project keys.
+ If a projectId is provided, the configs for all project keys are returned.
+ If a projectKey is provided, the config for that specific project key is returned.
+ Both a projectId and a projectKey may be provided in the same request.
+
+ If the project config is currently in cache, the cached entry will be returned.
+ If the project config is not in cache, the project config for that key will be null.
+ """
project_id = request.GET.get("projectId")
+ project_key_param = request.GET.get("projectKey")
- project_keys = []
- if project_id is not None:
- try:
+ if not project_id and not project_key_param:
+ return Response(
+ {"error": "Please supply either the projectId or projectKey parameter."}, status=400
+ )
+
+ try:
+ if project_id:
project = Project.objects.get_from_cache(id=project_id)
- for project_key in project.key_set.all():
- project_keys.append(project_key.public_key)
+ else:
+ project = None
+ if project_key_param:
+ supplied_project_key = ProjectKey.objects.get(public_key=project_key_param)
+ else:
+ supplied_project_key = None
+ except Exception:
+ raise Http404
- except Exception:
- raise Http404
+ project_keys = self._get_project_keys(project, supplied_project_key)
- project_key_param = request.GET.get("projectKey")
- if project_key_param is not None:
- project_keys.append(project_key_param)
-
- configs = {}
- for key in project_keys:
- cached_config = projectconfig_cache.backend.get(key)
- if cached_config is not None:
- configs[key] = cached_config
+ configs: MutableMapping[str, MutableMapping[str, Any] | ProjectConfig | None] = {}
+ uncached_keys = []
+ for project_key in project_keys:
+ if isinstance(project_key, ProjectKey) and project_key.public_key is not None:
+ cached_config = projectconfig_cache.backend.get(project_key.public_key)
+ if cached_config is not None:
+ configs[project_key.public_key] = cached_config
+ else:
+ configs[project_key.public_key] = None
+ uncached_keys.append(project_key)
+
+ if uncached_keys:
+ if supplied_project_key is not None:
+ generated_configs = self._get_project_config_sync(
+ supplied_project_key.project, uncached_keys
+ )
+ elif project is not None:
+ generated_configs = self._get_project_config_sync(project, uncached_keys)
else:
- configs[key] = None
+ generated_configs = {}
+
+ for key, config in generated_configs.items():
+ configs[key] = config
- # TODO: if we don't think we'll add anything to the endpoint
- # we may as well return just the configs
return Response({"configs": configs}, status=200)
def post(self, request: Request) -> Response:
- """Regenerate the project config"""
- project_id = request.GET.get("projectId")
+ """The POST endpoint recalculates the project configs for a specific projectId.
+ The project config for all projectKeys of the provided projectId is recalculated
+ synchronously and subsequently stored in the cache.
+ """
+ project_id = request.data.get("projectId")
- if project_id is not None:
- try:
- schedule_invalidate_project_config(
- project_id=project_id, trigger="_admin_trigger_invalidate_project_config"
- )
+ if not project_id:
+ return Response({"error": "Missing projectId parameter"}, status=400)
+
+ try:
+ project = Project.objects.get_from_cache(id=project_id)
+ project_keys = self._get_project_keys(project)
+ schedule_invalidate_project_config(
+ project_id=project_id, trigger="_admin_trigger_invalidate_project_config"
+ )
+ except Exception:
+ raise Http404
+
+ configs = self._get_project_config_sync(project, project_keys)
+ projectconfig_cache.backend.set_many(configs)
+ return Response(status=201)
+
+ def _get_project_keys(
+ self, project: Project | None = None, project_key: ProjectKey | None = None
+ ) -> list[ProjectKey]:
+ project_keys = []
+
+ if project_key is not None:
+ project_keys.append(project_key)
+
+ if project is not None:
+ for project_key2 in project.key_set.all():
+ project_keys.append(project_key2)
+
+ return project_keys
+
+ def _get_project_config_sync(
+ self, project: Project, project_keys: list[ProjectKey]
+ ) -> MutableMapping[str, MutableMapping[str, Any]]:
+ configs: MutableMapping[str, MutableMapping[str, Any]] = {}
- except Exception:
- raise Http404
+ for project_key in project_keys:
+ if project_key.public_key is not None:
+ configs[project_key.public_key] = get_project_config(
+ project, project_keys=[project_key]
+ ).to_dict()
- return Response(status=204)
+ return configs
diff --git a/src/sentry/api/endpoints/api_application_details.py b/src/sentry/api/endpoints/api_application_details.py
index 73f6dadeeba959..09492f1e433d73 100644
--- a/src/sentry/api/endpoints/api_application_details.py
+++ b/src/sentry/api/endpoints/api_application_details.py
@@ -33,8 +33,26 @@ class ApiApplicationSerializer(serializers.Serializer):
)
+class ApiApplicationEndpoint(Endpoint):
+ def convert_args(
+ self,
+ request: Request,
+ app_id: str,
+ *args,
+ **kwargs,
+ ):
+ try:
+ application = ApiApplication.objects.get(
+ owner_id=request.user.id, client_id=app_id, status=ApiApplicationStatus.active
+ )
+ except ApiApplication.DoesNotExist:
+ raise ResourceDoesNotExist
+ kwargs["application"] = application
+ return (args, kwargs)
+
+
@control_silo_endpoint
-class ApiApplicationDetailsEndpoint(Endpoint):
+class ApiApplicationDetailsEndpoint(ApiApplicationEndpoint):
publish_status = {
"DELETE": ApiPublishStatus.PRIVATE,
"GET": ApiPublishStatus.PRIVATE,
@@ -43,24 +61,10 @@ class ApiApplicationDetailsEndpoint(Endpoint):
authentication_classes = (SessionAuthentication,)
permission_classes = (IsAuthenticated,)
- def get(self, request: Request, app_id) -> Response:
- try:
- instance = ApiApplication.objects.get(
- owner_id=request.user.id, client_id=app_id, status=ApiApplicationStatus.active
- )
- except ApiApplication.DoesNotExist:
- raise ResourceDoesNotExist
-
- return Response(serialize(instance, request.user))
-
- def put(self, request: Request, app_id) -> Response:
- try:
- instance = ApiApplication.objects.get(
- owner_id=request.user.id, client_id=app_id, status=ApiApplicationStatus.active
- )
- except ApiApplication.DoesNotExist:
- raise ResourceDoesNotExist
+ def get(self, request: Request, application: ApiApplication) -> Response:
+ return Response(serialize(application, request.user))
+ def put(self, request: Request, application: ApiApplication) -> Response:
serializer = ApiApplicationSerializer(data=request.data, partial=True)
if serializer.is_valid():
@@ -79,22 +83,15 @@ def put(self, request: Request, app_id) -> Response:
if "termsUrl" in result:
kwargs["terms_url"] = result["termsUrl"]
if kwargs:
- instance.update(**kwargs)
- return Response(serialize(instance, request.user), status=200)
+ application.update(**kwargs)
+ return Response(serialize(application, request.user), status=200)
return Response(serializer.errors, status=400)
- def delete(self, request: Request, app_id) -> Response:
- try:
- instance = ApiApplication.objects.get(
- owner_id=request.user.id, client_id=app_id, status=ApiApplicationStatus.active
- )
- except ApiApplication.DoesNotExist:
- raise ResourceDoesNotExist
-
+ def delete(self, request: Request, application: ApiApplication) -> Response:
with transaction.atomic(using=router.db_for_write(ApiApplication)):
- updated = ApiApplication.objects.filter(id=instance.id).update(
+ updated = ApiApplication.objects.filter(id=application.id).update(
status=ApiApplicationStatus.pending_deletion
)
if updated:
- ScheduledDeletion.schedule(instance, days=0, actor=request.user)
+ ScheduledDeletion.schedule(application, days=0, actor=request.user)
return Response(status=204)
diff --git a/src/sentry/api/endpoints/api_application_rotate_secret.py b/src/sentry/api/endpoints/api_application_rotate_secret.py
index 31e02a9729e8d7..a86a9fe663fc5a 100644
--- a/src/sentry/api/endpoints/api_application_rotate_secret.py
+++ b/src/sentry/api/endpoints/api_application_rotate_secret.py
@@ -5,14 +5,14 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
-from sentry.api.base import Endpoint, control_silo_endpoint
-from sentry.api.exceptions import ResourceDoesNotExist
+from sentry.api.base import control_silo_endpoint
+from sentry.api.endpoints.api_application_details import ApiApplicationEndpoint
from sentry.api.serializers import serialize
-from sentry.models.apiapplication import ApiApplication, ApiApplicationStatus, generate_token
+from sentry.models.apiapplication import ApiApplication, generate_token
@control_silo_endpoint
-class ApiApplicationRotateSecretEndpoint(Endpoint):
+class ApiApplicationRotateSecretEndpoint(ApiApplicationEndpoint):
publish_status = {
"POST": ApiPublishStatus.PRIVATE,
}
@@ -20,13 +20,7 @@ class ApiApplicationRotateSecretEndpoint(Endpoint):
authentication_classes = (SessionAuthentication,)
permission_classes = (IsAuthenticated,)
- def post(self, request: Request, app_id) -> Response:
- try:
- api_application = ApiApplication.objects.get(
- owner_id=request.user.id, client_id=app_id, status=ApiApplicationStatus.active
- )
- except ApiApplication.DoesNotExist:
- raise ResourceDoesNotExist
+ def post(self, request: Request, application: ApiApplication) -> Response:
new_token = generate_token()
- api_application.update(client_secret=new_token)
+ application.update(client_secret=new_token)
return Response(serialize({"clientSecret": new_token}))
diff --git a/src/sentry/api/endpoints/api_authorizations.py b/src/sentry/api/endpoints/api_authorizations.py
index c8daaad0b88755..3013e0dca904b1 100644
--- a/src/sentry/api/endpoints/api_authorizations.py
+++ b/src/sentry/api/endpoints/api_authorizations.py
@@ -50,7 +50,9 @@ def delete(self, request: Request) -> Response:
with outbox_context(transaction.atomic(using=router.db_for_write(ApiToken)), flush=False):
for token in ApiToken.objects.filter(
- user_id=request.user.id, application=auth.application_id
+ user_id=request.user.id,
+ application=auth.application_id,
+ scoping_organization_id=auth.organization_id,
):
token.delete()
diff --git a/src/sentry/api/endpoints/chunk.py b/src/sentry/api/endpoints/chunk.py
index 9095cef5a7ab6e..f8e1c275f05037 100644
--- a/src/sentry/api/endpoints/chunk.py
+++ b/src/sentry/api/endpoints/chunk.py
@@ -36,6 +36,7 @@
"portablepdbs", # Portable PDB debug file
"artifact_bundles", # Artifact Bundles for JavaScript Source Maps
"artifact_bundles_v2", # The `assemble` endpoint will check for missing chunks
+ "proguard", # Chunk-uploaded proguard mappings
)
@@ -122,6 +123,10 @@ def post(self, request: Request, organization) -> Response:
"""
Upload chunks and store them as FileBlobs
`````````````````````````````````````````
+
+ Requests to this endpoint should use the region-specific domain
+ e.g. `us.sentry.io` or `de.sentry.io`
+
:pparam file file: The filename should be sha1 hash of the content.
Also not you can add up to MAX_CHUNKS_PER_REQUEST files
in this request.
diff --git a/src/sentry/api/endpoints/debug_files.py b/src/sentry/api/endpoints/debug_files.py
index f623e71a8e865c..a59efb4bb8c780 100644
--- a/src/sentry/api/endpoints/debug_files.py
+++ b/src/sentry/api/endpoints/debug_files.py
@@ -189,9 +189,9 @@ def get(self, request: Request, project: Project) -> Response:
class DebugFilesEndpoint(ProjectEndpoint):
owner = ApiOwner.OWNERS_INGEST
publish_status = {
- "DELETE": ApiPublishStatus.UNKNOWN,
- "GET": ApiPublishStatus.UNKNOWN,
- "POST": ApiPublishStatus.UNKNOWN,
+ "DELETE": ApiPublishStatus.PRIVATE,
+ "GET": ApiPublishStatus.PRIVATE,
+ "POST": ApiPublishStatus.PRIVATE,
}
permission_classes = (ProjectReleasePermission,)
@@ -351,6 +351,9 @@ def post(self, request: Request, project: Project) -> Response:
Unlike other API requests, files must be uploaded using the
traditional multipart/form-data content-type.
+ Requests to this endpoint should use the region-specific domain
+ e.g. `us.sentry.io` or `de.sentry.io`
+
The file uploaded is a zip archive of a Apple .dSYM folder which
contains the individual debug images. Uploading through this endpoint
will create different files for the contained images.
diff --git a/src/sentry/api/endpoints/event_attachment_details.py b/src/sentry/api/endpoints/event_attachment_details.py
index a31d68a8778729..a2b71da478f977 100644
--- a/src/sentry/api/endpoints/event_attachment_details.py
+++ b/src/sentry/api/endpoints/event_attachment_details.py
@@ -51,8 +51,8 @@ def has_object_permission(self, request: Request, view, project):
class EventAttachmentDetailsEndpoint(ProjectEndpoint):
owner = ApiOwner.OWNERS_INGEST
publish_status = {
- "DELETE": ApiPublishStatus.UNKNOWN,
- "GET": ApiPublishStatus.UNKNOWN,
+ "DELETE": ApiPublishStatus.PRIVATE,
+ "GET": ApiPublishStatus.PRIVATE,
}
permission_classes = (EventAttachmentDetailsPermission,)
diff --git a/src/sentry/api/endpoints/event_attachments.py b/src/sentry/api/endpoints/event_attachments.py
index aca3603758ca0b..5acbbbd832de8e 100644
--- a/src/sentry/api/endpoints/event_attachments.py
+++ b/src/sentry/api/endpoints/event_attachments.py
@@ -16,7 +16,7 @@
class EventAttachmentsEndpoint(ProjectEndpoint):
owner = ApiOwner.OWNERS_INGEST
publish_status = {
- "GET": ApiPublishStatus.UNKNOWN,
+ "GET": ApiPublishStatus.PRIVATE,
}
def get(self, request: Request, project, event_id) -> Response:
diff --git a/src/sentry/api/endpoints/group_integration_details.py b/src/sentry/api/endpoints/group_integration_details.py
index 7663f366a458c0..fdc22c9e9e9601 100644
--- a/src/sentry/api/endpoints/group_integration_details.py
+++ b/src/sentry/api/endpoints/group_integration_details.py
@@ -266,12 +266,26 @@ def post(self, request: Request, group, integration_id) -> Response:
)
installation = integration.get_installation(organization_id=organization_id)
- try:
- data = installation.create_issue(request.data)
- except IntegrationFormError as exc:
- return Response(exc.field_errors, status=400)
- except IntegrationError as e:
- return Response({"non_field_errors": [str(e)]}, status=400)
+
+ with ProjectManagementEvent(
+ action_type=ProjectManagementActionType.CREATE_EXTERNAL_ISSUE_VIA_ISSUE_DETAIL,
+ integration=integration,
+ ).capture() as lifecycle:
+ lifecycle.add_extras(
+ {
+ "provider": integration.provider,
+ "integration_id": integration.id,
+ }
+ )
+
+ try:
+ data = installation.create_issue(request.data)
+ except IntegrationFormError as exc:
+ lifecycle.record_halt(exc)
+ return Response(exc.field_errors, status=400)
+ except IntegrationError as e:
+ lifecycle.record_failure(e)
+ return Response({"non_field_errors": [str(e)]}, status=400)
external_issue_key = installation.make_external_key(data)
external_issue, created = ExternalIssue.objects.get_or_create(
diff --git a/src/sentry/api/endpoints/group_tagkey_details.py b/src/sentry/api/endpoints/group_tagkey_details.py
index 1a43dce0f2290d..5bc94f32388487 100644
--- a/src/sentry/api/endpoints/group_tagkey_details.py
+++ b/src/sentry/api/endpoints/group_tagkey_details.py
@@ -1,5 +1,4 @@
-from drf_spectacular.types import OpenApiTypes
-from drf_spectacular.utils import OpenApiParameter, extend_schema
+from drf_spectacular.utils import extend_schema
from rest_framework.request import Request
from rest_framework.response import Response
@@ -38,13 +37,7 @@ class GroupTagKeyDetailsEndpoint(GroupEndpoint, EnvironmentMixin):
IssueParams.ISSUE_ID,
IssueParams.ISSUES_OR_GROUPS,
GlobalParams.ORG_ID_OR_SLUG,
- OpenApiParameter(
- name="key",
- location=OpenApiParameter.PATH,
- type=OpenApiTypes.STR,
- description="The tag key to look the values up for.",
- required=True,
- ),
+ IssueParams.KEY,
GlobalParams.ENVIRONMENT,
],
responses={
diff --git a/src/sentry/api/endpoints/group_tagkey_values.py b/src/sentry/api/endpoints/group_tagkey_values.py
index 42525aed6df77d..5b7f8d5e9c9260 100644
--- a/src/sentry/api/endpoints/group_tagkey_values.py
+++ b/src/sentry/api/endpoints/group_tagkey_values.py
@@ -1,7 +1,9 @@
+from drf_spectacular.utils import extend_schema
from rest_framework.request import Request
from rest_framework.response import Response
from sentry import analytics, tagstore
+from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import EnvironmentMixin, region_silo_endpoint
from sentry.api.bases.group import GroupEndpoint
@@ -9,25 +11,51 @@
from sentry.api.helpers.environments import get_environments
from sentry.api.serializers import serialize
from sentry.api.serializers.models.tagvalue import UserTagValueSerializer
+from sentry.apidocs.constants import (
+ RESPONSE_BAD_REQUEST,
+ RESPONSE_FORBIDDEN,
+ RESPONSE_NOT_FOUND,
+ RESPONSE_UNAUTHORIZED,
+)
+from sentry.apidocs.examples.tags_examples import TagsExamples
+from sentry.apidocs.parameters import GlobalParams, IssueParams
+from sentry.apidocs.utils import inline_sentry_response_serializer
+from sentry.tagstore.types import TagValueSerializerResponse
+@extend_schema(tags=["Events"])
@region_silo_endpoint
class GroupTagKeyValuesEndpoint(GroupEndpoint, EnvironmentMixin):
publish_status = {
- "GET": ApiPublishStatus.UNKNOWN,
+ "GET": ApiPublishStatus.PUBLIC,
}
+ owner = ApiOwner.ISSUES
+ @extend_schema(
+ operation_id="List a Tag's Values for an Issue",
+ description="Returns a list of values associated with this key for an issue.\nReturns at most 1000 values when paginated.",
+ parameters=[
+ IssueParams.ISSUE_ID,
+ IssueParams.ISSUES_OR_GROUPS,
+ GlobalParams.ORG_ID_OR_SLUG,
+ IssueParams.KEY,
+ IssueParams.SORT,
+ GlobalParams.ENVIRONMENT,
+ ],
+ responses={
+ 200: inline_sentry_response_serializer(
+ "TagKeyValuesDict", list[TagValueSerializerResponse]
+ ),
+ 400: RESPONSE_BAD_REQUEST,
+ 401: RESPONSE_UNAUTHORIZED,
+ 403: RESPONSE_FORBIDDEN,
+ 404: RESPONSE_NOT_FOUND,
+ },
+ examples=[TagsExamples.GROUP_TAGKEY_VALUES],
+ )
def get(self, request: Request, group, key) -> Response:
"""
List a Tag's Values
- ```````````````````
-
- Return a list of values associated with this key for an issue.
- When paginated can return at most 1000 values.
-
- :pparam string issue_id: the ID of the issue to retrieve.
- :pparam string key: the tag key to look the values up for.
- :auth: required
"""
analytics.record(
"eventuser_endpoint.request",
diff --git a/src/sentry/api/endpoints/organization_dashboards.py b/src/sentry/api/endpoints/organization_dashboards.py
index 7599bd16839449..fd6b9096f8e8f0 100644
--- a/src/sentry/api/endpoints/organization_dashboards.py
+++ b/src/sentry/api/endpoints/organization_dashboards.py
@@ -1,7 +1,7 @@
from __future__ import annotations
from django.db import IntegrityError, router, transaction
-from django.db.models import Case, IntegerField, When
+from django.db.models import Case, Exists, IntegerField, OuterRef, Value, When
from drf_spectacular.utils import extend_schema
from rest_framework.request import Request
from rest_framework.response import Response
@@ -28,8 +28,10 @@
from sentry.apidocs.examples.dashboard_examples import DashboardExamples
from sentry.apidocs.parameters import CursorQueryParam, GlobalParams, VisibilityParams
from sentry.apidocs.utils import inline_sentry_response_serializer
-from sentry.models.dashboard import Dashboard
+from sentry.db.models.fields.text import CharField
+from sentry.models.dashboard import Dashboard, DashboardFavoriteUser
from sentry.models.organization import Organization
+from sentry.users.services.user.service import user_service
MAX_RETRIES = 2
DUPLICATE_TITLE_PATTERN = r"(.*) copy(?:$|\s(\d+))"
@@ -163,14 +165,43 @@ def get(self, request: Request, organization) -> Response:
order_by = ["last_visited" if desc else "-last_visited"]
elif sort_by == "mydashboards":
- order_by = [
- Case(
- When(created_by_id=request.user.id, then=-1),
- default="created_by_id",
- output_field=IntegerField(),
- ),
- "-date_added",
- ]
+ if features.has(
+ "organizations:dashboards-table-view", organization, actor=request.user
+ ):
+ user_name_dict = {
+ user.id: user.name
+ for user in user_service.get_many_by_id(
+ ids=list(dashboards.values_list("created_by_id", flat=True))
+ )
+ }
+ dashboards = dashboards.annotate(
+ user_name=Case(
+ *[
+ When(created_by_id=user_id, then=Value(user_name))
+ for user_id, user_name in user_name_dict.items()
+ ],
+ default=Value(""),
+ output_field=CharField(),
+ )
+ )
+ order_by = [
+ Case(
+ When(created_by_id=request.user.id, then=-1),
+ default=1,
+ output_field=IntegerField(),
+ ),
+ "-user_name" if desc else "user_name",
+ "-date_added",
+ ]
+ else:
+ order_by = [
+ Case(
+ When(created_by_id=request.user.id, then=-1),
+ default="created_by_id",
+ output_field=IntegerField(),
+ ),
+ "-date_added",
+ ]
elif sort_by == "myDashboardsAndRecentlyViewed":
order_by = [
@@ -184,9 +215,13 @@ def get(self, request: Request, organization) -> Response:
if features.has("organizations:dashboards-favourite", organization, actor=request.user):
pin_by = request.query_params.get("pin")
if pin_by == "favorites":
+ favorited_by_subquery = DashboardFavoriteUser.objects.filter(
+ dashboard=OuterRef("pk"), user_id=request.user.id
+ )
+
order_by_favorites = [
Case(
- When(dashboardfavoriteuser__user_id=request.user.id, then=-1),
+ When(Exists(favorited_by_subquery), then=-1),
default=1,
output_field=IntegerField(),
)
diff --git a/src/sentry/api/endpoints/organization_events.py b/src/sentry/api/endpoints/organization_events.py
index 938f8609eb01e1..80577c317c5ed3 100644
--- a/src/sentry/api/endpoints/organization_events.py
+++ b/src/sentry/api/endpoints/organization_events.py
@@ -35,6 +35,7 @@
)
from sentry.snuba.metrics.extraction import MetricSpecType
from sentry.snuba.referrer import Referrer
+from sentry.snuba.types import DatasetQuery
from sentry.snuba.utils import dataset_split_decision_inferred_from_query, get_dataset
from sentry.types.ratelimit import RateLimit, RateLimitCategory
from sentry.utils.snuba import SnubaError
@@ -55,13 +56,14 @@ class DiscoverDatasetSplitException(Exception):
pass
-ALLOWED_EVENTS_REFERRERS = {
+ALLOWED_EVENTS_REFERRERS: set[str] = {
Referrer.API_ORGANIZATION_EVENTS.value,
Referrer.API_ORGANIZATION_EVENTS_V2.value,
Referrer.API_DASHBOARDS_TABLEWIDGET.value,
Referrer.API_DASHBOARDS_BIGNUMBERWIDGET.value,
Referrer.API_DISCOVER_TRANSACTIONS_LIST.value,
Referrer.API_DISCOVER_QUERY_TABLE.value,
+ Referrer.API_INSIGHTS_USER_GEO_SUBREGION_SELECTOR.value,
Referrer.API_PERFORMANCE_BROWSER_RESOURCE_MAIN_TABLE.value,
Referrer.API_PERFORMANCE_BROWSER_RESOURCES_PAGE_SELECTOR.value,
Referrer.API_PERFORMANCE_BROWSER_WEB_VITALS_PROJECT.value,
@@ -167,10 +169,11 @@ class DiscoverDatasetSplitException(Exception):
Referrer.API_PERFORMANCE_MOBILE_UI_METRICS_RIBBON.value,
Referrer.API_PERFORMANCE_SPAN_SUMMARY_HEADER_DATA.value,
Referrer.API_PERFORMANCE_SPAN_SUMMARY_TABLE.value,
- Referrer.API_EXPLORE_SPANS_SAMPLES_TABLE,
+ Referrer.API_EXPLORE_SPANS_SAMPLES_TABLE.value,
+ Referrer.ISSUE_DETAILS_STREAMLINE_GRAPH.value,
+ Referrer.ISSUE_DETAILS_STREAMLINE_LIST.value,
}
-API_TOKEN_REFERRER = Referrer.API_AUTH_TOKEN_EVENTS.value
LEGACY_RATE_LIMIT = dict(limit=30, window=1, concurrent_limit=15)
# reduced limit will be the future default for all organizations not explicitly on increased limit
@@ -273,8 +276,7 @@ class OrganizationEventsEndpoint(OrganizationEventsV2EndpointBase):
enforce_rate_limit = True
- def rate_limits(*args, **kwargs) -> dict[str, dict[RateLimitCategory, RateLimit]]:
- return rate_limit_events(*args, **kwargs)
+ rate_limits = rate_limit_events
def get_features(self, organization: Organization, request: Request) -> Mapping[str, bool]:
feature_names = [
@@ -295,11 +297,13 @@ def get_features(self, organization: Organization, request: Request) -> Mapping[
actor=request.user,
)
- all_features = (
- batch_features.get(f"organization:{organization.id}", {})
- if batch_features is not None
- else {}
- )
+ all_features: dict[str, bool] = {}
+
+ if batch_features is not None:
+ for feature_name, result in batch_features.get(
+ f"organization:{organization.id}", {}
+ ).items():
+ all_features[feature_name] = bool(result)
for feature_name in feature_names:
if feature_name not in all_features:
@@ -379,7 +383,7 @@ def get(self, request: Request, organization) -> Response:
}
)
except InvalidParams as err:
- raise ParseError(err)
+ raise ParseError(detail=str(err))
batch_features = self.get_features(organization, request)
@@ -418,7 +422,9 @@ def get(self, request: Request, organization) -> Response:
# Force the referrer to "api.auth-token.events" for events requests authorized through a bearer token
if request.auth:
- referrer = API_TOKEN_REFERRER
+ referrer = Referrer.API_AUTH_TOKEN_EVENTS.value
+ elif referrer is None:
+ referrer = Referrer.API_ORGANIZATION_EVENTS.value
elif referrer not in ALLOWED_EVENTS_REFERRERS:
if referrer:
with sentry_sdk.isolation_scope() as scope:
@@ -431,12 +437,18 @@ def get(self, request: Request, organization) -> Response:
use_aggregate_conditions = request.GET.get("allowAggregateConditions", "1") == "1"
# Only works when dataset == spans
use_rpc = request.GET.get("useRpc", "0") == "1"
-
- def _data_fn(scoped_dataset, offset, limit, query) -> dict[str, Any]:
+ sentry_sdk.set_tag("performance.use_rpc", use_rpc)
+
+ def _data_fn(
+ dataset_query: DatasetQuery,
+ offset: int,
+ limit: int,
+ query: str | None,
+ ):
if use_rpc and dataset == spans_eap:
return spans_rpc.run_table_query(
params=snuba_params,
- query_string=query,
+ query_string=query or "",
selected_columns=self.get_field_list(organization, request),
orderby=self.get_orderby(request),
offset=offset,
@@ -448,9 +460,9 @@ def _data_fn(scoped_dataset, offset, limit, query) -> dict[str, Any]:
),
)
query_source = self.get_request_source(request)
- return scoped_dataset.query(
+ return dataset_query(
selected_columns=self.get_field_list(organization, request),
- query=query,
+ query=query or "",
snuba_params=snuba_params,
equations=self.get_equation_list(organization, request),
orderby=self.get_orderby(request),
@@ -459,24 +471,30 @@ def _data_fn(scoped_dataset, offset, limit, query) -> dict[str, Any]:
referrer=referrer,
auto_fields=True,
auto_aggregations=True,
- use_aggregate_conditions=use_aggregate_conditions,
allow_metric_aggregates=allow_metric_aggregates,
+ use_aggregate_conditions=use_aggregate_conditions,
transform_alias_to_input_format=True,
# Whether the flag is enabled or not, regardless of the referrer
has_metrics=use_metrics,
use_metrics_layer=batch_features.get("organizations:use-metrics-layer", False),
on_demand_metrics_enabled=on_demand_metrics_enabled,
on_demand_metrics_type=on_demand_metrics_type,
- query_source=query_source,
fallback_to_transactions=features.has(
"organizations:performance-discover-dataset-selector",
organization,
actor=request.user,
),
+ query_source=query_source,
)
@sentry_sdk.tracing.trace
- def _dashboards_data_fn(scoped_dataset, offset, limit, scoped_query, dashboard_widget_id):
+ def _dashboards_data_fn(
+ scoped_dataset_query: DatasetQuery,
+ offset: int,
+ limit: int,
+ scoped_query: str | None,
+ dashboard_widget_id: str,
+ ):
try:
widget = DashboardWidget.objects.get(id=dashboard_widget_id)
does_widget_have_split = widget.discover_widget_split is not None
@@ -487,27 +505,29 @@ def _dashboards_data_fn(scoped_dataset, offset, limit, scoped_query, dashboard_w
)
if does_widget_have_split and not has_override_feature:
+ dataset_query: DatasetQuery
+
# This is essentially cached behaviour and we skip the check
if widget.discover_widget_split == DashboardWidgetTypes.ERROR_EVENTS:
- split_dataset = errors
+ dataset_query = errors.query
elif widget.discover_widget_split == DashboardWidgetTypes.TRANSACTION_LIKE:
# We can't add event.type:transaction for now because of on-demand.
- split_dataset = scoped_dataset
+ dataset_query = scoped_dataset_query
else:
- split_dataset = discover
+ dataset_query = discover.query
- return _data_fn(split_dataset, offset, limit, scoped_query)
+ return _data_fn(dataset_query, offset, limit, scoped_query)
with handle_query_errors():
try:
- error_results = _data_fn(errors, offset, limit, scoped_query)
+ error_results = _data_fn(errors.query, offset, limit, scoped_query)
# Widget has not split the discover dataset yet, so we need to check if there are errors etc.
has_errors = len(error_results["data"]) > 0
except SnubaError:
has_errors = False
error_results = None
- original_results = _data_fn(scoped_dataset, offset, limit, scoped_query)
+ original_results = _data_fn(scoped_dataset_query, offset, limit, scoped_query)
if original_results.get("data") is not None:
dataset_meta = original_results.get("meta", {})
else:
@@ -524,7 +544,9 @@ def _dashboards_data_fn(scoped_dataset, offset, limit, scoped_query, dashboard_w
if has_errors and has_other_data and not using_metrics:
# In the case that the original request was not using the metrics dataset, we cannot be certain that other data is solely transactions.
sentry_sdk.set_tag("third_split_query", True)
- transaction_results = _data_fn(transactions, offset, limit, scoped_query)
+ transaction_results = _data_fn(
+ transactions.query, offset, limit, scoped_query
+ )
has_transactions = len(transaction_results["data"]) > 0
decision = self.save_split_decision(
@@ -532,7 +554,7 @@ def _dashboards_data_fn(scoped_dataset, offset, limit, scoped_query, dashboard_w
)
if decision == DashboardWidgetTypes.DISCOVER:
- return _data_fn(discover, offset, limit, scoped_query)
+ return _data_fn(discover.query, offset, limit, scoped_query)
elif decision == DashboardWidgetTypes.TRANSACTION_LIKE:
original_results["meta"]["discoverSplitDecision"] = (
DashboardWidgetTypes.get_type_name(
@@ -550,13 +572,19 @@ def _dashboards_data_fn(scoped_dataset, offset, limit, scoped_query, dashboard_w
except Exception as e:
# Swallow the exception if it was due to the discover split, and try again one more time.
if isinstance(e, ParseError):
- return _data_fn(scoped_dataset, offset, limit, scoped_query)
+ return _data_fn(scoped_dataset_query, offset, limit, scoped_query)
sentry_sdk.capture_exception(e)
- return _data_fn(scoped_dataset, offset, limit, scoped_query)
+ return _data_fn(scoped_dataset_query, offset, limit, scoped_query)
@sentry_sdk.tracing.trace
- def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_saved_query_id):
+ def _discover_data_fn(
+ scoped_dataset_query: DatasetQuery,
+ offset: int,
+ limit: int,
+ scoped_query: str | None,
+ discover_saved_query_id: str,
+ ):
try:
discover_query = DiscoverSavedQuery.objects.get(
id=discover_saved_query_id, organization=organization
@@ -565,7 +593,7 @@ def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_save
discover_query.dataset is not DiscoverSavedQueryTypes.DISCOVER
)
if does_widget_have_split:
- return _data_fn(scoped_dataset, offset, limit, scoped_query)
+ return _data_fn(scoped_dataset_query, offset, limit, scoped_query)
dataset_inferred_from_query = dataset_split_decision_inferred_from_query(
self.get_field_list(organization, request),
@@ -576,9 +604,11 @@ def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_save
# See if we can infer which dataset based on selected columns and query string.
with handle_query_errors():
- if dataset_inferred_from_query is not None:
+ if (
+ dataset := SAVED_QUERY_DATASET_MAP.get(dataset_inferred_from_query)
+ ) is not None:
result = _data_fn(
- SAVED_QUERY_DATASET_MAP[dataset_inferred_from_query],
+ dataset.query,
offset,
limit,
scoped_query,
@@ -602,11 +632,11 @@ def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_save
with ThreadPoolExecutor(max_workers=3) as exe:
futures = {
exe.submit(
- _data_fn, get_dataset(dataset_), offset, limit, scoped_query
- ): dataset_
- for dataset_ in [
- "errors",
- "transactions",
+ _data_fn, dataset_query, offset, limit, scoped_query
+ ): dataset_name
+ for dataset_name, dataset_query in [
+ ("errors", errors.query),
+ ("transactions", transactions.query),
]
}
@@ -660,10 +690,10 @@ def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_save
except Exception as e:
# Swallow the exception if it was due to the discover split, and try again one more time.
if isinstance(e, ParseError):
- return _data_fn(scoped_dataset, offset, limit, scoped_query)
+ return _data_fn(scoped_dataset_query, offset, limit, scoped_query)
sentry_sdk.capture_exception(e)
- return _data_fn(scoped_dataset, offset, limit, scoped_query)
+ return _data_fn(scoped_dataset_query, offset, limit, scoped_query)
def data_fn_factory(scoped_dataset):
"""
@@ -677,17 +707,17 @@ def data_fn_factory(scoped_dataset):
dashboard_widget_id = request.GET.get("dashboardWidgetId", None)
discover_saved_query_id = request.GET.get("discoverSavedQueryId", None)
- def fn(offset, limit) -> dict[str, Any]:
+ def fn(offset, limit):
if save_discover_dataset_decision and discover_saved_query_id:
return _discover_data_fn(
- scoped_dataset, offset, limit, scoped_query, discover_saved_query_id
+ scoped_dataset.query, offset, limit, scoped_query, discover_saved_query_id
)
if not (metrics_enhanced and dashboard_widget_id):
- return _data_fn(scoped_dataset, offset, limit, scoped_query)
+ return _data_fn(scoped_dataset.query, offset, limit, scoped_query)
return _dashboards_data_fn(
- scoped_dataset, offset, limit, scoped_query, dashboard_widget_id
+ scoped_dataset.query, offset, limit, scoped_query, dashboard_widget_id
)
return fn
diff --git a/src/sentry/api/endpoints/organization_member/details.py b/src/sentry/api/endpoints/organization_member/details.py
index 1564634770ea69..28a78b458a3545 100644
--- a/src/sentry/api/endpoints/organization_member/details.py
+++ b/src/sentry/api/endpoints/organization_member/details.py
@@ -226,10 +226,7 @@ def put(
is_member = not (
request.access.has_scope("member:invite") and request.access.has_scope("member:admin")
)
- enable_member_invite = (
- features.has("organizations:members-invite-teammates", organization)
- and not organization.flags.disable_member_invite
- )
+ enable_member_invite = not organization.flags.disable_member_invite
# Members can only resend invites
reinvite_request_only = set(result.keys()).issubset({"reinvite", "regenerate"})
# Members can only resend invites that they sent
@@ -470,8 +467,7 @@ def delete(
if acting_member != member:
if not request.access.has_scope("member:admin"):
if (
- features.has("organizations:members-invite-teammates", organization)
- and not organization.flags.disable_member_invite
+ not organization.flags.disable_member_invite
and request.access.has_scope("member:invite")
):
return self._handle_deletion_by_member(
diff --git a/src/sentry/api/endpoints/organization_metrics_meta.py b/src/sentry/api/endpoints/organization_metrics_meta.py
index d31a96d9dc2a03..42f7ac530d692e 100644
--- a/src/sentry/api/endpoints/organization_metrics_meta.py
+++ b/src/sentry/api/endpoints/organization_metrics_meta.py
@@ -11,7 +11,6 @@
from sentry.snuba import metrics_performance
COUNT_UNPARAM = "count_unparameterized_transactions()"
-COUNT_HAS_TXN = "count_has_transaction_name()"
COUNT_NULL = "count_null_transactions()"
diff --git a/src/sentry/api/endpoints/organization_release_files.py b/src/sentry/api/endpoints/organization_release_files.py
index 4e057726f2c47f..6acd32c6126b34 100644
--- a/src/sentry/api/endpoints/organization_release_files.py
+++ b/src/sentry/api/endpoints/organization_release_files.py
@@ -52,6 +52,9 @@ def post(self, request: Request, organization, version) -> Response:
Unlike other API requests, files must be uploaded using the
traditional multipart/form-data content-type.
+ Requests to this endpoint should use the region-specific domain
+    e.g. `us.sentry.io` or `de.sentry.io`
+
The optional 'name' attribute should reflect the absolute path
that this file will be referenced as. For example, in the case of
JavaScript you might specify the full web URI.
diff --git a/src/sentry/api/endpoints/organization_releases.py b/src/sentry/api/endpoints/organization_releases.py
index ac88f8c0442372..dc4810abe3e290 100644
--- a/src/sentry/api/endpoints/organization_releases.py
+++ b/src/sentry/api/endpoints/organization_releases.py
@@ -437,8 +437,11 @@ def post(self, request: Request, organization) -> Response:
:pparam string organization_id_or_slug: the id or slug of the organization the
release belongs to.
- :param string version: a version identifier for this release. Can
- be a version number, a commit hash etc.
+ :param string version: a version identifier for this release. Can
+ be a version number, a commit hash etc. It cannot contain certain
+ whitespace characters (`\\r`, `\\n`, `\\f`, `\\x0c`, `\\t`) or any
+ slashes (`\\`, `/`). The version names `.`, `..` and `latest` are also
+ reserved, and cannot be used.
:param string ref: an optional commit reference. This is useful if
a tagged version has been provided.
:param url url: a URL that points to the release. This can be the
diff --git a/src/sentry/api/endpoints/organization_sessions.py b/src/sentry/api/endpoints/organization_sessions.py
index 372c1e58b40128..5d418045c2239d 100644
--- a/src/sentry/api/endpoints/organization_sessions.py
+++ b/src/sentry/api/endpoints/organization_sessions.py
@@ -1,7 +1,6 @@
from contextlib import contextmanager
import sentry_sdk
-from django.utils.datastructures import MultiValueDict
from drf_spectacular.utils import extend_schema
from rest_framework.exceptions import ParseError
from rest_framework.request import Request
@@ -117,15 +116,10 @@ def build_sessions_query(
except NoProjects:
raise NoProjects("No projects available") # give it a description
- # HACK to prevent front-end crash when release health is sessions-based:
- query_params = MultiValueDict(request.GET)
- if not release_health.backend.is_metrics_based() and request.GET.get("interval") == "10s":
- query_params["interval"] = "1m"
-
query_config = release_health.backend.sessions_query_config(organization)
return QueryDefinition(
- query=query_params,
+ query=request.GET,
params=params,
offset=offset,
limit=limit,
diff --git a/src/sentry/api/endpoints/project_backfill_similar_issues_embeddings_records.py b/src/sentry/api/endpoints/project_backfill_similar_issues_embeddings_records.py
index a3b4598fbab85c..c2b5bea900ef93 100644
--- a/src/sentry/api/endpoints/project_backfill_similar_issues_embeddings_records.py
+++ b/src/sentry/api/endpoints/project_backfill_similar_issues_embeddings_records.py
@@ -2,7 +2,6 @@
from rest_framework.request import Request
from rest_framework.response import Response
-from sentry import features
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
@@ -22,10 +21,6 @@ class ProjectBackfillSimilarIssuesEmbeddingsRecords(ProjectEndpoint):
}
def post(self, request: Request, project: Project) -> Response:
- # needs to have the flag to run
- if not features.has("projects:similarity-embeddings-backfill", project):
- return Response(status=404)
-
# needs to either be a superuser or be in single org mode
if not (is_active_superuser(request) or settings.SENTRY_SINGLE_ORGANIZATION):
return Response(status=404)
diff --git a/src/sentry/api/endpoints/project_release_files.py b/src/sentry/api/endpoints/project_release_files.py
index 5674787662a85d..cedb8e32f5ae6b 100644
--- a/src/sentry/api/endpoints/project_release_files.py
+++ b/src/sentry/api/endpoints/project_release_files.py
@@ -272,6 +272,9 @@ def post(self, request: Request, project, version) -> Response:
Unlike other API requests, files must be uploaded using the
traditional multipart/form-data content-type.
+ Requests to this endpoint should use the region-specific domain
+    e.g. `us.sentry.io` or `de.sentry.io`
+
The optional 'name' attribute should reflect the absolute path
that this file will be referenced as. For example, in the case of
JavaScript you might specify the full web URI.
diff --git a/src/sentry/api/endpoints/project_rule_preview.py b/src/sentry/api/endpoints/project_rule_preview.py
index 713fb931d31912..0ee40b273db646 100644
--- a/src/sentry/api/endpoints/project_rule_preview.py
+++ b/src/sentry/api/endpoints/project_rule_preview.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
from collections.abc import Mapping
from typing import Any
@@ -10,10 +12,11 @@
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
from sentry.api.bases.project import ProjectAlertRulePermission, ProjectEndpoint
-from sentry.api.serializers import GroupSerializer, serialize
+from sentry.api.serializers import serialize
+from sentry.api.serializers.models.group import BaseGroupSerializerResponse, GroupSerializer
from sentry.api.serializers.rest_framework.rule import RulePreviewSerializer
from sentry.models.group import Group
-from sentry.models.groupinbox import get_inbox_details
+from sentry.models.groupinbox import InboxDetails, get_inbox_details
from sentry.rules.history.preview import preview
@@ -86,12 +89,19 @@ def post(self, request: Request, project) -> Response:
return response
+class _PreviewResponse(BaseGroupSerializerResponse):
+ inbox: InboxDetails
+ lastTriggered: int
+
+
class PreviewSerializer(GroupSerializer):
def serialize(
- self, obj: dict[str, Any], attrs: Mapping[Any, Any], user: Any, **kwargs: Any
- ) -> dict[str, Any]:
+ self, obj: Group, attrs: Mapping[Any, Any], user: Any, **kwargs: Any
+ ) -> _PreviewResponse:
result = super().serialize(obj, attrs, user, **kwargs)
group_id = int(result["id"])
- result["inbox"] = kwargs["inbox_details"].get(group_id)
- result["lastTriggered"] = kwargs["group_fires"][group_id]
- return result
+ return {
+ **result,
+ "inbox": kwargs["inbox_details"].get(group_id),
+ "lastTriggered": kwargs["group_fires"][group_id],
+ }
diff --git a/src/sentry/api/endpoints/project_servicehook_stats.py b/src/sentry/api/endpoints/project_servicehook_stats.py
index dffeb8c606ae61..c6d6c1e35a758e 100644
--- a/src/sentry/api/endpoints/project_servicehook_stats.py
+++ b/src/sentry/api/endpoints/project_servicehook_stats.py
@@ -26,9 +26,9 @@ def get(self, request: Request, project, hook_id) -> Response:
stat_args = self._parse_args(request)
- stats = {}
+ stats: dict[int, dict[str, int]] = {}
for model, name in ((TSDBModel.servicehook_fired, "total"),):
- result = tsdb.get_range(
+ result = tsdb.backend.get_range(
model=model,
keys=[hook.id],
**stat_args,
diff --git a/src/sentry/api/endpoints/project_team_details.py b/src/sentry/api/endpoints/project_team_details.py
index d28572ab6d09b1..de546e775ecf06 100644
--- a/src/sentry/api/endpoints/project_team_details.py
+++ b/src/sentry/api/endpoints/project_team_details.py
@@ -90,7 +90,12 @@ def post(self, request: Request, project, team: Team) -> Response:
event=audit_log.get_event_id("PROJECT_TEAM_ADD"),
data={"team_slug": team.slug, "project_slug": project.slug},
)
- return Response(serialize(project, request.user, ProjectWithTeamSerializer()), status=201)
+ return Response(
+ serialize(
+ project, request.user, ProjectWithTeamSerializer(collapse=["unusedFeatures"])
+ ),
+ status=201,
+ )
@extend_schema(
operation_id="Delete a Team from a Project",
diff --git a/src/sentry/api/endpoints/team_release_count.py b/src/sentry/api/endpoints/team_release_count.py
index 7d7806b836b4f3..a0f7b34ca329fd 100644
--- a/src/sentry/api/endpoints/team_release_count.py
+++ b/src/sentry/api/endpoints/team_release_count.py
@@ -47,8 +47,8 @@ def get(self, request: Request, team) -> Response:
)
agg_project_counts = {}
- project_avgs = defaultdict(int)
- this_week_totals = defaultdict(int)
+ project_avgs: dict[int, float] = defaultdict(int)
+ this_week_totals: dict[int, int] = defaultdict(int)
this_week_start = now() - timedelta(days=7)
for row in per_project_daily_release_counts:
project_avgs[row["projects"]] += row["count"]
@@ -56,8 +56,8 @@ def get(self, request: Request, team) -> Response:
if row["bucket"] >= this_week_start:
this_week_totals[row["projects"]] += row["count"]
- for row in project_avgs:
- project_avgs[row] = (project_avgs[row] / (end - start).days) * 7
+ for project_id in project_avgs:
+ project_avgs[project_id] = (project_avgs[project_id] / (end - start).days) * 7
current_day = start.date()
end_date = end.date()
diff --git a/src/sentry/api/endpoints/user_organizationintegrations.py b/src/sentry/api/endpoints/user_organizationintegrations.py
index fb772593004615..ced6ea411bd85d 100644
--- a/src/sentry/api/endpoints/user_organizationintegrations.py
+++ b/src/sentry/api/endpoints/user_organizationintegrations.py
@@ -8,6 +8,7 @@
from sentry.api.serializers import serialize
from sentry.constants import ObjectStatus
from sentry.integrations.models.organization_integration import OrganizationIntegration
+from sentry.organizations.services.organization import organization_service
from sentry.users.api.bases.user import UserEndpoint
from sentry.users.services.user.service import user_service
@@ -33,8 +34,15 @@ def get(self, request: Request, user) -> Response:
if request.user.id is not None
else ()
)
+ organization_ids = []
+ for o in organizations:
+ org_context = organization_service.get_organization_by_id(
+ id=o.id, user_id=request.user.id
+ )
+ if org_context and org_context.member and "org:read" in org_context.member.scopes:
+ organization_ids.append(o.id)
queryset = OrganizationIntegration.objects.filter(
- organization_id__in=[o.id for o in organizations],
+ organization_id__in=organization_ids,
status=ObjectStatus.ACTIVE,
integration__status=ObjectStatus.ACTIVE,
)
diff --git a/src/sentry/api/event_search.py b/src/sentry/api/event_search.py
index dfaec71ec874c0..cc76cff4940dca 100644
--- a/src/sentry/api/event_search.py
+++ b/src/sentry/api/event_search.py
@@ -145,7 +145,6 @@
raw_aggregate_param = ~r"[^()\t\n, \"]+"
quoted_aggregate_param = '"' ('\\"' / ~r'[^\t\n\"]')* '"'
search_key = explicit_number_tag_key / key / quoted_key
-search_type = "number" / "string"
text_key = explicit_tag_key / explicit_string_tag_key / search_key
value = ~r"[^()\t\n ]*"
quoted_value = '"' ('\\"' / ~r'[^"]')* '"'
@@ -1071,9 +1070,6 @@ def visit_explicit_string_tag_key(self, node, children):
def visit_explicit_number_tag_key(self, node, children):
return SearchKey(f"tags[{children[2].name},number]")
- def visit_search_type(self, node, children):
- return node.text
-
def visit_aggregate_key(self, node, children):
children = remove_optional_nodes(children)
children = remove_space(children)
diff --git a/src/sentry/api/exceptions.py b/src/sentry/api/exceptions.py
index 2e5689a148aacd..babd399f8d9655 100644
--- a/src/sentry/api/exceptions.py
+++ b/src/sentry/api/exceptions.py
@@ -118,6 +118,15 @@ def __init__(self, user):
super().__init__(username=user.username)
+class PrimaryEmailVerificationRequired(SentryAPIException):
+ status_code = status.HTTP_401_UNAUTHORIZED
+ code = "primary-email-verification-required"
+ message = "Primary email verification required."
+
+ def __init__(self, user):
+ super().__init__(username=user.username)
+
+
class TwoFactorRequired(SentryAPIException):
status_code = status.HTTP_401_UNAUTHORIZED
code = "2fa-required"
diff --git a/src/sentry/api/helpers/group_index/delete.py b/src/sentry/api/helpers/group_index/delete.py
index 0aa10caf082221..bdfd3ae683d361 100644
--- a/src/sentry/api/helpers/group_index/delete.py
+++ b/src/sentry/api/helpers/group_index/delete.py
@@ -124,7 +124,7 @@ def delete_groups(
request: Request,
projects: Sequence[Project],
organization_id: int,
- search_fn: SearchFunction,
+ search_fn: SearchFunction | None = None,
) -> Response:
"""
`search_fn` refers to the `search.query` method with the appropriate
@@ -139,7 +139,7 @@ def delete_groups(
id__in=set(group_ids),
).exclude(status__in=[GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS])
)
- else:
+ elif search_fn:
try:
cursor_result, _ = search_fn(
{
diff --git a/src/sentry/api/helpers/group_index/update.py b/src/sentry/api/helpers/group_index/update.py
index a95ebdfdbf8149..180c8ff71ebfbc 100644
--- a/src/sentry/api/helpers/group_index/update.py
+++ b/src/sentry/api/helpers/group_index/update.py
@@ -165,10 +165,7 @@ def get_current_release_version_of_group(group: Group, follows_semver: bool = Fa
def update_groups(
request: Request,
- group_ids: Sequence[int | str] | None,
- projects: Sequence[Project],
- organization_id: int,
- search_fn: SearchFunction | None = None,
+ groups: Sequence[Group],
user: RpcUser | User | AnonymousUser | None = None,
data: Mapping[str, Any] | None = None,
) -> Response:
@@ -178,17 +175,15 @@ def update_groups(
acting_user = user if user and user.is_authenticated else None
data = data or request.data
- try:
- group_ids, group_list = get_group_ids_and_group_list(
- organization_id, projects, group_ids, search_fn
- )
- except ValidationError:
- logger.exception("Error getting group ids and group list") # Track the error in Sentry
- return Response(
- {"detail": "Invalid query. Error getting group ids and group list"}, status=400
- )
+ # so we won't have to requery for each group
+ project_lookup = {g.project_id: g.project for g in groups}
+ projects = list(project_lookup.values())
+
+ # Assert all projects belong to the same organization
+ if len({p.organization_id for p in projects}) > 1:
+ return Response({"detail": "All groups must belong to same organization."}, status=400)
- if not group_ids or not group_list:
+ if not groups:
return Response({"detail": "No groups found"}, status=204)
serializer = validate_request(request, projects, data)
@@ -201,17 +196,9 @@ def update_groups(
acting_user = user if user.is_authenticated else None
- # so we won't have to requery for each group
- project_lookup = {g.project_id: g.project for g in group_list}
- group_project_ids = {g.project_id for g in group_list}
- # filter projects down to only those that have groups in the search results
- projects = [p for p in projects if p.id in group_project_ids]
-
- queryset = Group.objects.filter(id__in=group_ids)
-
discard = result.get("discard")
if discard:
- return handle_discard(request, list(queryset), projects, acting_user)
+ return handle_discard(request, groups, projects, acting_user)
status_details = result.pop("statusDetails", result)
status = result.get("status")
@@ -219,7 +206,7 @@ def update_groups(
if "priority" in result:
handle_priority(
priority=result["priority"],
- group_list=group_list,
+ group_list=groups,
acting_user=acting_user,
project_lookup=project_lookup,
)
@@ -228,7 +215,7 @@ def update_groups(
result, res_type = handle_resolve_in_release(
status,
status_details,
- group_list,
+ groups,
projects,
project_lookup,
acting_user,
@@ -240,7 +227,7 @@ def update_groups(
elif status:
result = handle_other_status_updates(
result,
- group_list,
+ groups,
projects,
project_lookup,
status_details,
@@ -250,7 +237,7 @@ def update_groups(
return prepare_response(
result,
- group_list,
+ groups,
project_lookup,
projects,
acting_user,
@@ -260,6 +247,40 @@ def update_groups(
)
+def update_groups_with_search_fn(
+ request: Request,
+ group_ids: Sequence[int | str] | None,
+ projects: Sequence[Project],
+ organization_id: int,
+ search_fn: SearchFunction,
+) -> Response:
+ group_list = []
+ if group_ids:
+ group_list = get_group_list(organization_id, projects, group_ids)
+
+ if not group_list:
+ try:
+ # It can raise ValidationError
+ cursor_result, _ = search_fn(
+ {
+ "limit": BULK_MUTATION_LIMIT,
+ "paginator_options": {"max_limit": BULK_MUTATION_LIMIT},
+ }
+ )
+ except ValidationError:
+ logger.exception("Error getting group ids and group list") # Track the error in Sentry
+ return Response(
+ {"detail": "Invalid query. Error getting group ids and group list"}, status=400
+ )
+
+ group_list = list(cursor_result)
+
+ if not group_list:
+ return Response({"detail": "No groups found"}, status=204)
+
+ return update_groups(request, group_list)
+
+
def validate_request(
request: Request,
projects: Sequence[Project],
@@ -284,56 +305,36 @@ def validate_request(
return serializer
-def get_group_ids_and_group_list(
+def get_group_list(
organization_id: int,
projects: Sequence[Project],
- group_ids: Sequence[int | str] | None,
- search_fn: SearchFunction | None,
-) -> tuple[list[int | str], list[Group]]:
+ group_ids: Sequence[int | str],
+) -> list[Group]:
"""
- Gets group IDs and group list based on provided filters.
+ Gets group list based on provided filters.
Args:
organization_id: ID of the organization
projects: Sequence of projects to filter groups by
- group_ids: Optional sequence of specific group IDs to fetch
- search_fn: Optional search function to find groups if no IDs provided
+ group_ids: Sequence of specific group IDs to fetch
- Returns:
- Tuple of:
- - List of group IDs that were found
- - List of Group objects that were found
-
- Notes:
- - If group_ids provided, filters to only valid groups in the org/projects
- - If no group_ids but search_fn provided, uses search to find groups
- - Limited to BULK_MUTATION_LIMIT results when using search
+ Returns: List of Group objects filtered to only valid groups in the org/projects
"""
- _group_ids: list[int | str] = []
- _group_list: list[Group] = []
-
- if group_ids:
- _group_list = list(
+ groups = []
+ # Convert all group IDs to integers and filter out any non-integer values
+ group_ids_int = [int(gid) for gid in group_ids if str(gid).isdigit()]
+ if group_ids_int:
+ return list(
Group.objects.filter(
- project__organization_id=organization_id, project__in=projects, id__in=group_ids
+ project__organization_id=organization_id, project__in=projects, id__in=group_ids_int
)
)
- # filter down group ids to only valid matches
- _group_ids = [g.id for g in _group_list]
-
- if search_fn and not _group_ids:
- # It can raise ValidationError
- cursor_result, _ = search_fn(
- {
- "limit": BULK_MUTATION_LIMIT,
- "paginator_options": {"max_limit": BULK_MUTATION_LIMIT},
- }
- )
-
- _group_list = list(cursor_result)
- _group_ids = [g.id for g in _group_list]
+ else:
+ for group_id in group_ids:
+ if isinstance(group_id, str):
+ groups.append(Group.objects.by_qualified_short_id(organization_id, group_id))
- return _group_ids, _group_list
+ return groups
def handle_resolve_in_release(
@@ -459,6 +460,10 @@ def handle_resolve_in_release(
except IndexError:
release = None
for group in group_list:
+ # If the group is already resolved, we don't need to do anything
+ if group.status == GroupStatus.RESOLVED:
+ continue
+
with transaction.atomic(router.db_for_write(Group)):
process_group_resolution(
group,
@@ -706,9 +711,6 @@ def handle_other_status_updates(
new_substatus = infer_substatus(new_status, new_substatus, status_details, group_list)
with transaction.atomic(router.db_for_write(Group)):
- # TODO(gilbert): update() doesn't call pre_save and bypasses any substatus defaulting we have there
- # we should centralize the logic for validating and defaulting substatus values
- # and refactor pre_save and the above new_substatus assignment to account for this
status_updated = queryset.exclude(status=new_status).update(
status=new_status, substatus=new_substatus
)
diff --git a/src/sentry/api/helpers/ios_models.py b/src/sentry/api/helpers/ios_models.py
new file mode 100644
index 00000000000000..ab296dbbc0bd1b
--- /dev/null
+++ b/src/sentry/api/helpers/ios_models.py
@@ -0,0 +1,290 @@
+IPHONE4 = "iPhone 4"
+IPHONE5 = "iPhone 5"
+IPHONE5C = "iPhone 5c"
+IPHONE5S = "iPhone 5s"
+IPHONE7 = "iPhone 7"
+IPHONE7PLUS = "iPhone 7 Plus"
+IPHONE8 = "iPhone 8"
+IPHONE8PLUS = "iPhone 8 Plus"
+IPHONEX = "iPhone X"
+IPHONEXSMAX = "iPhone XS Max"
+
+IPAD2 = "iPad 2"
+IPADGEN3 = "iPad (3rd gen)"
+IPADGEN4 = "iPad (4th gen)"
+IPADGEN5 = "iPad (5th gen)"
+IPADGEN6 = "iPad (6th gen)"
+IPADGEN7 = "iPad (7th gen)"
+IPADGEN8 = "iPad (8th gen)"
+IPADGEN9 = "iPad (9th gen)"
+IPADGEN10 = "iPad (10th gen)"
+
+IPADAIRGEN1 = "iPad Air (1st gen)"
+IPADAIR2 = "iPad Air 2"
+IPADAIRGEN3 = "iPad Air (3rd gen)"
+IPADAIRGEN4 = "iPad Air (4th gen)"
+IPADAIRGEN5 = "iPad Air (5th gen)"
+IPADAIRGEN6 = "iPad Air (6th gen)"
+IPADAIRGEN7 = "iPad Air (7th gen)"
+
+IPADPRO9GEN1 = "iPad Pro (9.7-inch)"
+IPADPRO10 = "iPad Pro (10.5-inch)"
+IPADPRO11GEN1 = "iPad Pro (11-inch, 1st gen)"
+IPADPRO11GEN2 = "iPad Pro (11-inch, 2nd gen)"
+IPADPRO11GEN3 = "iPad Pro (11-inch, 3rd gen)"
+IPADPRO11GEN4 = "iPad Pro (11-inch, 4th gen)"
+IPADPRO11GEN5 = "iPad Pro (11-inch, 5th gen)"
+IPADPRO12GEN1 = "iPad Pro (12.9-inch, 1st gen)"
+IPADPRO12GEN2 = "iPad Pro (12.9-inch, 2nd gen)"
+IPADPRO12GEN3 = "iPad Pro (12.9-inch, 3rd gen)"
+IPADPRO12GEN4 = "iPad Pro (12.9-inch, 4th gen)"
+IPADPRO12GEN5 = "iPad Pro (12.9-inch, 5th gen)"
+IPADPRO12GEN6 = "iPad Pro (12.9-inch, 6th gen)"
+IPADPRO12GEN7 = "iPad Pro (12.9-inch, 7th gen)"
+
+IPADMINIGEN1 = "iPad mini (1st gen)"
+IPADMINI2 = "iPad mini 2"
+IPADMINI3 = "iPad mini 3"
+IPADMINI4 = "iPad mini 4"
+IPADMINIGEN5 = "iPad mini (5th gen)"
+IPADMINIGEN6 = "iPad mini (6th gen)"
+
+APPLEWATCHGEN1 = "Apple Watch (1st gen)"
+APPLEWATCHSERIES1 = "Apple Watch Series 1"
+APPLEWATCHSERIES2 = "Apple Watch Series 2"
+APPLEWATCHSERIES3 = "Apple Watch Series 3"
+APPLEWATCHSERIES4 = "Apple Watch Series 4"
+APPLEWATCHSERIES5 = "Apple Watch Series 5"
+APPLEWATCHSERIES6 = "Apple Watch Series 6"
+APPLEWATCHSERIES7 = "Apple Watch Series 7"
+APPLEWATCHSERIES8 = "Apple Watch Series 8"
+APPLEWATCHSERIES9 = "Apple Watch Series 9"
+APPLEWATCHSERIES10 = "Apple Watch Series 10"
+APPLEWATCHSE1 = "Apple Watch SE (1st gen)"
+APPLEWATCHSE2 = "Apple Watch SE (2nd gen)"
+
+APPLETVGEN1 = "Apple TV (1st gen)"
+APPLETVGEN2 = "Apple TV (2nd gen)"
+APPLETVGEN3 = "Apple TV (3rd gen)"
+
+# see https://theapplewiki.com/wiki/models
+IOS_MODELS: dict[str, str] = {
+ # iPhone
+ "iPhone1,1": "iPhone (1st gen)",
+ "iPhone1,2": "iPhone 3G",
+ "iPhone2,1": "iPhone 3GS",
+ "iPhone3,1": IPHONE4,
+ "iPhone3,2": IPHONE4,
+ "iPhone3,3": IPHONE4,
+ "iPhone4,1": "iPhone 4S",
+ "iPhone5,1": IPHONE5,
+ "iPhone5,2": IPHONE5,
+ "iPhone5,3": IPHONE5C,
+ "iPhone5,4": IPHONE5C,
+ "iPhone6,1": IPHONE5S,
+ "iPhone6,2": IPHONE5S,
+ "iPhone7,2": "iPhone 6",
+ "iPhone7,1": "iPhone 6 Plus",
+ "iPhone8,1": "iPhone 6s",
+ "iPhone8,2": "iPhone 6s Plus",
+ "iPhone8,4": "iPhone SE (1st gen)",
+ "iPhone9,1": IPHONE7,
+ "iPhone9,3": IPHONE7,
+ "iPhone9,2": IPHONE7PLUS,
+ "iPhone9,4": IPHONE7PLUS,
+ "iPhone10,1": IPHONE8,
+ "iPhone10,4": IPHONE8,
+ "iPhone10,2": IPHONE8PLUS,
+ "iPhone10,5": IPHONE8PLUS,
+ "iPhone10,3": IPHONEX,
+ "iPhone10,6": IPHONEX,
+ "iPhone11,8": "iPhone XR",
+ "iPhone11,2": "iPhone XS",
+ "iPhone11,4": IPHONEXSMAX,
+ "iPhone11,6": IPHONEXSMAX,
+ "iPhone12,1": "iPhone 11",
+ "iPhone12,3": "iPhone 11 Pro",
+ "iPhone12,5": "iPhone 11 Pro Max",
+ "iPhone12,8": "iPhone SE (2nd gen)",
+ "iPhone13,1": "iPhone 12 mini",
+ "iPhone13,2": "iPhone 12",
+ "iPhone13,3": "iPhone 12 Pro",
+ "iPhone13,4": "iPhone 12 Pro Max",
+ "iPhone14,4": "iPhone 13 mini",
+ "iPhone14,5": "iPhone 13",
+ "iPhone14,2": "iPhone 13 Pro",
+ "iPhone14,3": "iPhone 13 Pro Max",
+ "iPhone14,6": "iPhone SE (3rd gen)",
+ "iPhone14,7": "iPhone 14",
+ "iPhone14,8": "iPhone 14 Plus",
+ "iPhone15,2": "iPhone 14 Pro",
+ "iPhone15,3": "iPhone 14 Pro Max",
+ "iPhone15,4": "iPhone 15",
+ "iPhone15,5": "iPhone 15 Plus",
+ "iPhone16,1": "iPhone 15 Pro",
+ "iPhone16,2": "iPhone 15 Pro Max",
+ "iPhone17,1": "iPhone 16 Pro",
+ "iPhone17,2": "iPhone 16 Pro Max",
+ "iPhone17,3": "iPhone 16",
+ "iPhone17,4": "iPhone 16 Plus",
+ # iPod Touch
+ "iPod1,1": "iPod touch (1st gen)",
+ "iPod2,1": "iPod touch (2nd gen)",
+ "iPod3,1": "iPod touch (3rd gen)",
+ "iPod4,1": "iPod touch (4th gen)",
+ "iPod5,1": "iPod touch (5th gen)",
+ "iPod7,1": "iPod touch (6th gen)",
+ "iPod9,1": "iPod touch (7th gen)",
+ # iPad
+ "iPad1,1": "iPad (1st gen)",
+ "iPad2,1": IPAD2,
+ "iPad2,2": IPAD2,
+ "iPad2,3": IPAD2,
+ "iPad2,4": IPAD2,
+ "iPad3,1": IPADGEN3,
+ "iPad3,2": IPADGEN3,
+ "iPad3,3": IPADGEN3,
+ "iPad3,4": IPADGEN4,
+ "iPad3,5": IPADGEN4,
+ "iPad3,6": IPADGEN4,
+ "iPad6,11": IPADGEN5,
+ "iPad6,12": IPADGEN5,
+ "iPad7,5": IPADGEN6,
+ "iPad7,6": IPADGEN6,
+ "iPad7,11": IPADGEN7,
+ "iPad7,12": IPADGEN7,
+ "iPad11,6": IPADGEN8,
+ "iPad11,7": IPADGEN8,
+ "iPad12,1": IPADGEN9,
+ "iPad12,2": IPADGEN9,
+ # iPad Air
+ "iPad4,1": IPADAIRGEN1,
+ "iPad4,2": IPADAIRGEN1,
+ "iPad4,3": IPADAIRGEN1,
+ "iPad5,3": IPADAIR2,
+ "iPad5,4": IPADAIR2,
+ "iPad11,3": IPADAIRGEN3,
+ "iPad11,4": IPADAIRGEN3,
+ "iPad13,1": IPADAIRGEN4,
+ "iPad13,2": IPADAIRGEN4,
+ "iPad13,16": IPADAIRGEN5,
+ "iPad13,17": IPADAIRGEN5,
+ "iPad14,8": IPADAIRGEN6,
+ "iPad14,9": IPADAIRGEN6,
+ "iPad14,10": IPADAIRGEN7,
+ "iPad14,11": IPADAIRGEN7,
+ # iPad Pro
+ "iPad6,7": IPADPRO12GEN1,
+ "iPad6,3": IPADPRO9GEN1,
+ "iPad6,4": IPADPRO9GEN1,
+ "iPad6,8": IPADPRO12GEN1,
+ "iPad7,1": IPADPRO12GEN2,
+ "iPad7,2": IPADPRO12GEN2,
+ "iPad7,3": IPADPRO10,
+ "iPad7,4": IPADPRO10,
+ "iPad8,1": IPADPRO11GEN1,
+ "iPad8,2": IPADPRO11GEN1,
+ "iPad8,3": IPADPRO11GEN1,
+ "iPad8,4": IPADPRO11GEN1,
+ "iPad8,5": IPADPRO12GEN3,
+ "iPad8,6": IPADPRO12GEN3,
+ "iPad8,7": IPADPRO12GEN3,
+ "iPad8,8": IPADPRO12GEN3,
+ "iPad8,9": IPADPRO11GEN2,
+ "iPad8,10": IPADPRO11GEN2,
+ "iPad8,11": IPADPRO12GEN4,
+ "iPad8,12": IPADPRO12GEN4,
+ "iPad13,4": IPADPRO11GEN3,
+ "iPad13,5": IPADPRO11GEN3,
+ "iPad13,6": IPADPRO11GEN3,
+ "iPad13,7": IPADPRO11GEN3,
+ "iPad13,8": IPADPRO12GEN5,
+ "iPad13,9": IPADPRO12GEN5,
+ "iPad13,10": IPADPRO12GEN5,
+ "iPad13,11": IPADPRO12GEN5,
+ "iPad14,3": IPADPRO11GEN4,
+ "iPad14,4": IPADPRO11GEN4,
+ "iPad14,5": IPADPRO12GEN6,
+ "iPad14,6": IPADPRO12GEN6,
+ "iPad16,3": IPADPRO11GEN5,
+ "iPad16,4": IPADPRO11GEN5,
+ "iPad16,5": IPADPRO12GEN7,
+ "iPad16,6": IPADPRO12GEN7,
+ # iPad Mini
+ "iPad2,5": IPADMINIGEN1,
+ "iPad2,6": IPADMINIGEN1,
+ "iPad2,7": IPADMINIGEN1,
+ "iPad4,4": IPADMINI2,
+ "iPad4,5": IPADMINI2,
+ "iPad4,6": IPADMINI2,
+ "iPad4,7": IPADMINI3,
+ "iPad4,8": IPADMINI3,
+ "iPad4,9": IPADMINI3,
+ "iPad5,1": IPADMINI4,
+ "iPad5,2": IPADMINI4,
+ "iPad11,1": IPADMINIGEN5,
+ "iPad11,2": IPADMINIGEN5,
+ "iPad13,18": IPADGEN10,
+ "iPad13,19": IPADGEN10,
+ "iPad14,1": IPADMINIGEN6,
+ "iPad14,2": IPADMINIGEN6,
+ # Apple Watch
+ "Watch1,1": APPLEWATCHGEN1,
+ "Watch1,2": APPLEWATCHGEN1,
+ "Watch2,6": APPLEWATCHSERIES1,
+ "Watch2,7": APPLEWATCHSERIES1,
+ "Watch2,3": APPLEWATCHSERIES2,
+ "Watch2,4": APPLEWATCHSERIES2,
+ "Watch3,1": APPLEWATCHSERIES3,
+ "Watch3,2": APPLEWATCHSERIES3,
+ "Watch3,3": APPLEWATCHSERIES3,
+ "Watch3,4": APPLEWATCHSERIES3,
+ "Watch4,1": APPLEWATCHSERIES4,
+ "Watch4,2": APPLEWATCHSERIES4,
+ "Watch4,3": APPLEWATCHSERIES4,
+ "Watch4,4": APPLEWATCHSERIES4,
+ "Watch5,1": APPLEWATCHSERIES5,
+ "Watch5,2": APPLEWATCHSERIES5,
+ "Watch5,3": APPLEWATCHSERIES5,
+ "Watch5,4": APPLEWATCHSERIES5,
+ "Watch6,3": APPLEWATCHSERIES6,
+ "Watch6,4": APPLEWATCHSERIES6,
+ "Watch6,6": APPLEWATCHSERIES7,
+ "Watch6,7": APPLEWATCHSERIES7,
+ "Watch6,8": APPLEWATCHSERIES7,
+ "Watch6,9": APPLEWATCHSERIES7,
+ "Watch6,14": APPLEWATCHSERIES8,
+ "Watch6,15": APPLEWATCHSERIES8,
+ "Watch6,16": APPLEWATCHSERIES8,
+ "Watch6,17": APPLEWATCHSERIES8,
+ "Watch7,1": APPLEWATCHSERIES9,
+ "Watch7,2": APPLEWATCHSERIES9,
+ "Watch7,3": APPLEWATCHSERIES9,
+ "Watch7,4": APPLEWATCHSERIES9,
+ "Watch7,8": APPLEWATCHSERIES10,
+ "Watch7,9": APPLEWATCHSERIES10,
+ "Watch7,10": APPLEWATCHSERIES10,
+ "Watch7,11": APPLEWATCHSERIES10,
+ # Apple Watch SE
+ "Watch5,9": APPLEWATCHSE1,
+ "Watch5,10": APPLEWATCHSE1,
+ "Watch5,11": APPLEWATCHSE1,
+ "Watch5,12": APPLEWATCHSE1,
+ "Watch6,10": APPLEWATCHSE2,
+ "Watch6,11": APPLEWATCHSE2,
+ "Watch6,12": APPLEWATCHSE2,
+ "Watch6,13": APPLEWATCHSE2,
+ # Apple Watch Ultra
+ "Watch6,18": "Apple Watch Ultra (1st gen)",
+ "Watch7,5": "Apple Watch Ultra (2nd gen)",
+ # Apple TV
+ "AppleTV1,1": "Apple TV (1st gen)",
+ "AppleTV2,1": "Apple TV (2nd gen)",
+ "AppleTV3,1": APPLETVGEN3,
+ "AppleTV3,2": APPLETVGEN3,
+ "AppleTV5,3": "Apple TV (4th gen)",
+ "AppleTV6,2": "Apple TV 4K",
+ "AppleTV11,1": "Apple TV 4K (2nd gen)",
+ "i386": "iOS Simulator (i386)",
+ "x86_64": "iOS Simulator (x86_64)",
+}
diff --git a/src/sentry/api/helpers/mobile.py b/src/sentry/api/helpers/mobile.py
index b00a5a5f3b3c52..c991c3d2526670 100644
--- a/src/sentry/api/helpers/mobile.py
+++ b/src/sentry/api/helpers/mobile.py
@@ -1,7 +1,7 @@
from __future__ import annotations
from sentry.api.helpers.android_models import ANDROID_MODELS
-from sentry.profiles.device import IOS_MODELS
+from sentry.api.helpers.ios_models import IOS_MODELS
def get_readable_device_name(device: str) -> str | None:
diff --git a/src/sentry/api/serializers/models/environment.py b/src/sentry/api/serializers/models/environment.py
index 5f4413f45b9013..4f69d7468d4d08 100644
--- a/src/sentry/api/serializers/models/environment.py
+++ b/src/sentry/api/serializers/models/environment.py
@@ -1,15 +1,7 @@
-from collections import namedtuple
-from datetime import timedelta
from typing import TypedDict
-from django.utils import timezone
-
-from sentry import tsdb
from sentry.api.serializers import Serializer, register
from sentry.models.environment import Environment, EnvironmentProject
-from sentry.tsdb.base import TSDBModel
-
-StatsPeriod = namedtuple("StatsPeriod", ("segments", "interval"))
class EnvironmentSerializerResponse(TypedDict):
@@ -39,49 +31,3 @@ def serialize(
"name": obj.environment.name,
"isHidden": obj.is_hidden is True,
}
-
-
-class GroupEnvironmentWithStatsSerializer(EnvironmentSerializer):
- STATS_PERIODS = {
- "24h": StatsPeriod(24, timedelta(hours=1)),
- "30d": StatsPeriod(30, timedelta(hours=24)),
- }
-
- def __init__(self, group, since=None, until=None):
- self.group = group
- self.since = since
- self.until = until
-
- def get_attrs(self, item_list, user, **kwargs):
- attrs = {item: {"stats": {}} for item in item_list}
- items = {self.group.id: []}
- for item in item_list:
- items[self.group.id].append(item.id)
-
- for key, (segments, interval) in self.STATS_PERIODS.items():
- until = self.until or timezone.now()
- since = self.since or until - (segments * interval)
-
- try:
- stats = tsdb.get_frequency_series(
- model=TSDBModel.frequent_environments_by_group,
- items=items,
- start=since,
- end=until,
- rollup=int(interval.total_seconds()),
- )
- except NotImplementedError:
- # TODO(dcramer): probably should log this, but not worth
- # erring out
- stats = {}
-
- for item in item_list:
- attrs[item]["stats"][key] = [
- (k, v[item.id]) for k, v in stats.get(self.group.id, {})
- ]
- return attrs
-
- def serialize(self, obj, attrs, user, **kwargs):
- result = super().serialize(obj, attrs, user)
- result["stats"] = attrs["stats"]
- return result
diff --git a/src/sentry/api/serializers/models/event.py b/src/sentry/api/serializers/models/event.py
index 1d2255fa2e3941..2cbc423f51dc8e 100644
--- a/src/sentry/api/serializers/models/event.py
+++ b/src/sentry/api/serializers/models/event.py
@@ -172,7 +172,6 @@ class TransactionEventFields(TypedDict, total=False):
endTimestamp: datetime
measurements: Any
breakdowns: Any
- _metrics_summary: Any
class EventSerializerResponse(
@@ -377,10 +376,6 @@ def __serialize_transaction_attrs(self, attrs, obj) -> TransactionEventFields:
"breakdowns": obj.data.get("breakdowns"),
}
- # The _ reflects the temporary nature of this field.
- if (transaction_metrics_summary := obj.data.get("_metrics_summary")) is not None:
- transaction_attrs["_metrics_summary"] = transaction_metrics_summary
-
return transaction_attrs
def __serialize_error_attrs(self, attrs, obj) -> ErrorEventFields:
diff --git a/src/sentry/api/serializers/models/group.py b/src/sentry/api/serializers/models/group.py
index b229c161ccc933..4a78c94cb8f270 100644
--- a/src/sentry/api/serializers/models/group.py
+++ b/src/sentry/api/serializers/models/group.py
@@ -317,7 +317,7 @@ def get_attrs(
return result
def serialize(
- self, obj: Group, attrs: MutableMapping[str, Any], user: Any, **kwargs: Any
+ self, obj: Group, attrs: Mapping[str, Any], user: Any, **kwargs: Any
) -> BaseGroupSerializerResponse:
status_details, status_label = self._get_status(attrs, obj)
permalink = self._get_permalink(attrs, obj)
@@ -390,7 +390,7 @@ def _collapse(self, key) -> bool:
return False
return key in self.collapse
- def _get_status(self, attrs: MutableMapping[str, Any], obj: Group):
+ def _get_status(self, attrs: Mapping[str, Any], obj: Group):
status = obj.status
status_details = {}
if attrs["ignore_until"]:
@@ -850,7 +850,7 @@ def __seen_stats_impl(
class SharedGroupSerializer(GroupSerializer):
def serialize(
- self, obj: Group, attrs: MutableMapping[str, Any], user: Any, **kwargs: Any
+ self, obj: Group, attrs: Mapping[str, Any], user: Any, **kwargs: Any
) -> BaseGroupSerializerResponse:
result = super().serialize(obj, attrs, user)
diff --git a/src/sentry/api/serializers/models/organization_access_request.py b/src/sentry/api/serializers/models/organization_access_request.py
index 0e5587d295465c..f96ad98652a2a5 100644
--- a/src/sentry/api/serializers/models/organization_access_request.py
+++ b/src/sentry/api/serializers/models/organization_access_request.py
@@ -5,17 +5,41 @@
@register(OrganizationAccessRequest)
class OrganizationAccessRequestSerializer(Serializer):
- def serialize(self, obj, attrs, user, **kwargs):
- serialized_user = None
- if obj.requester_id:
- serialized_users = user_service.serialize_many(filter=dict(user_ids=[obj.requester_id]))
- if serialized_users:
- serialized_user = serialized_users[0]
+ def get_attrs(self, item_list, user, **kwargs):
+
+ serialized_requesters = user_service.serialize_many(
+ filter=dict(user_ids=[item.requester_id for item in item_list if item.requester_id])
+ )
+
+ serialized_requesters_by_id = {
+ int(requester["id"]): requester for requester in serialized_requesters
+ }
+
+ serialized_members = serialize(
+ [item.member for item in item_list],
+ user,
+ )
+
+ serialized_members_by_id = {int(member["id"]): member for member in serialized_members}
- d = {
+ serialized_teams = serialize([item.team for item in item_list], user)
+
+ serialized_teams_by_id = {int(team["id"]): team for team in serialized_teams}
+
+ return {
+ item: {
+ "requester": serialized_requesters_by_id.get(item.requester_id),
+ "member": serialized_members_by_id.get(item.member_id),
+ "team": serialized_teams_by_id.get(item.team_id),
+ }
+ for item in item_list
+ }
+
+ def serialize(self, obj, attrs, user, **kwargs):
+ serialized_access_request = {
"id": str(obj.id),
- "member": serialize(obj.member),
- "team": serialize(obj.team),
- "requester": serialized_user,
+ "member": attrs["member"],
+ "team": attrs["team"],
+ "requester": attrs["requester"],
}
- return d
+ return serialized_access_request
diff --git a/src/sentry/api/serializers/models/project.py b/src/sentry/api/serializers/models/project.py
index 840d4af67c0dce..3b98730448b3af 100644
--- a/src/sentry/api/serializers/models/project.py
+++ b/src/sentry/api/serializers/models/project.py
@@ -80,7 +80,6 @@
"servicehooks",
"similarity-embeddings",
"similarity-embeddings-delete-by-hash",
- "similarity-embeddings-backfill",
}
diff --git a/src/sentry/api/serializers/models/team.py b/src/sentry/api/serializers/models/team.py
index f64f993e5c72cd..0aa1845775702e 100644
--- a/src/sentry/api/serializers/models/team.py
+++ b/src/sentry/api/serializers/models/team.py
@@ -2,10 +2,11 @@
import dataclasses
from collections import defaultdict
-from collections.abc import Mapping, MutableMapping, MutableSequence, Sequence
+from collections.abc import Mapping, Sequence
from datetime import datetime
-from typing import TYPE_CHECKING, AbstractSet, Any, TypedDict
+from typing import TYPE_CHECKING, Any, TypedDict
+from django.contrib.auth.models import AnonymousUser
from django.db.models import Count
from sentry import roles
@@ -39,9 +40,9 @@
def _get_team_memberships(
team_list: Sequence[Team],
- user: User,
+ user: User | AnonymousUser,
optimization: SingularRpcAccessOrgOptimization | None = None,
-) -> Mapping[int, str | None]:
+) -> dict[int, str | None]:
"""Get memberships the user has in the provided team list"""
if not user.is_authenticated:
return {}
@@ -62,7 +63,7 @@ def _get_team_memberships(
}
-def get_member_totals(team_list: Sequence[Team], user: User) -> Mapping[str, int]:
+def get_member_totals(team_list: Sequence[Team], user: User | AnonymousUser) -> dict[int, int]:
"""Get the total number of members in each team"""
if not user.is_authenticated:
return {}
@@ -79,8 +80,10 @@ def get_member_totals(team_list: Sequence[Team], user: User) -> Mapping[str, int
def get_org_roles(
- org_ids: set[int], user: User, optimization: SingularRpcAccessOrgOptimization | None = None
-) -> Mapping[int, str]:
+ org_ids: set[int],
+ user: User | AnonymousUser,
+ optimization: SingularRpcAccessOrgOptimization | None = None,
+) -> dict[int, str]:
"""
Get the roles the user has in each org
"""
@@ -103,7 +106,7 @@ def get_org_roles(
}
-def get_access_requests(item_list: Sequence[Team], user: User) -> AbstractSet[Team]:
+def get_access_requests(item_list: Sequence[Team], user: User | AnonymousUser) -> frozenset[int]:
if user.is_authenticated:
return frozenset(
OrganizationAccessRequest.objects.filter(
@@ -123,7 +126,7 @@ class BaseTeamSerializerResponse(TypedDict):
id: str
slug: str
name: str
- dateCreated: datetime
+ dateCreated: datetime | None
isMember: bool
teamRole: str | None
flags: dict[str, Any]
@@ -179,9 +182,11 @@ def _collapse(self, key: str) -> bool:
def get_attrs(
self, item_list: Sequence[Team], user: User, **kwargs: Any
- ) -> MutableMapping[Team, MutableMapping[str, Any]]:
+ ) -> dict[Team, dict[str, Any]]:
+ from sentry.api.serializers.models.project import ProjectSerializer
+
request = env.request
- org_ids: set[int] = {t.organization_id for t in item_list}
+ org_ids = {t.organization_id for t in item_list}
assert len(org_ids) == 1, "Cross organization query for teams"
@@ -195,13 +200,14 @@ def get_attrs(
access_requests = get_access_requests(item_list, user)
is_superuser = request and is_active_superuser(request) and request.user == user
- result: MutableMapping[Team, MutableMapping[str, Any]] = {}
+ result: dict[Team, dict[str, Any]] = {}
organization = Organization.objects.get_from_cache(id=list(org_ids)[0])
for team in item_list:
is_member = team.id in team_memberships
org_role = roles_by_org.get(team.organization_id)
- team_role_id, team_role_scopes = team_memberships.get(team.id), set()
+ team_role_id = team_memberships.get(team.id)
+ team_role_scopes: frozenset[str] = frozenset()
has_access = bool(
is_member
@@ -240,7 +246,11 @@ def get_attrs(
projects = [pt.project for pt in project_teams]
projects_by_id = {
- project.id: data for project, data in zip(projects, serialize(projects, user))
+ project.id: data
+ for project, data in zip(
+ projects,
+ serialize(projects, user, ProjectSerializer(collapse=["unusedFeatures"])),
+ )
}
project_map = defaultdict(list)
@@ -268,7 +278,7 @@ def get_attrs(
def serialize(
self, obj: Team, attrs: Mapping[str, Any], user: Any, **kwargs: Any
) -> BaseTeamSerializerResponse:
- result: BaseTeamSerializerResponse = {
+ return {
"id": str(obj.id),
"slug": obj.slug,
"name": obj.name,
@@ -284,8 +294,6 @@ def serialize(
"avatar": {"avatarType": "letter_avatar", "avatarUuid": None},
}
- return result
-
# See TeamSerializerResponse for explanation as to why this is needed
class TeamSerializer(BaseTeamSerializer):
@@ -294,17 +302,19 @@ def serialize(
) -> TeamSerializerResponse:
result = super().serialize(obj, attrs, user, **kwargs)
+ opt: _TeamSerializerResponseOptional = {}
+
# Expandable attributes.
if self._expand("externalTeams"):
- result["externalTeams"] = attrs["externalTeams"]
+ opt["externalTeams"] = attrs["externalTeams"]
if self._expand("organization"):
- result["organization"] = serialize(obj.organization, user)
+ opt["organization"] = serialize(obj.organization, user)
if self._expand("projects"):
- result["projects"] = attrs["projects"]
+ opt["projects"] = attrs["projects"]
- return result
+ return {**result, **opt}
class TeamWithProjectsSerializer(TeamSerializer):
@@ -316,14 +326,14 @@ def __init__(self) -> None:
def get_scim_teams_members(
team_list: Sequence[Team],
-) -> MutableMapping[Team, MutableSequence[MutableMapping[str, Any]]]:
+) -> dict[Team, list[dict[str, Any]]]:
members = RangeQuerySetWrapper(
OrganizationMember.objects.filter(teams__in=team_list)
.prefetch_related("teams")
.distinct("id"),
limit=10000,
)
- member_map: MutableMapping[Team, MutableSequence[MutableMapping[str, Any]]] = defaultdict(list)
+ member_map: dict[Team, list[dict[str, Any]]] = defaultdict(list)
for member in members:
for team in member.teams.all():
member_map[team].append({"value": str(member.id), "display": member.get_email()})
@@ -382,16 +392,16 @@ def __init__(
def get_attrs(
self, item_list: Sequence[Team], user: Any, **kwargs: Any
- ) -> Mapping[Team, MutableMapping[str, Any]]:
+ ) -> dict[Team, dict[str, Any]]:
- result: MutableMapping[int, MutableMapping[str, Any]] = {
+ result: dict[int, dict[str, Any]] = {
team.id: ({"members": []} if "members" in self.expand else {}) for team in item_list
}
- teams_by_id: Mapping[int, Team] = {t.id: t for t in item_list}
+ teams_by_id = {t.id: t for t in item_list}
if teams_by_id and "members" in self.expand:
- team_ids: list[int] = [t.id for t in item_list]
- team_memberships: list[TeamMembership] = get_team_memberships(team_ids=team_ids)
+ team_ids = [t.id for t in item_list]
+ team_memberships = get_team_memberships(team_ids=team_ids)
for team_member in team_memberships:
for team_id in team_member.team_ids:
diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py
index ab5ea0cba6d58a..ab1953a40c79e0 100644
--- a/src/sentry/api/urls.py
+++ b/src/sentry/api/urls.py
@@ -178,7 +178,6 @@
GroupHashesEndpoint,
GroupNotesDetailsEndpoint,
GroupNotesEndpoint,
- GroupParticipantsEndpoint,
GroupSimilarIssuesEmbeddingsEndpoint,
GroupSimilarIssuesEndpoint,
GroupTombstoneDetailsEndpoint,
@@ -313,6 +312,11 @@
from sentry.sentry_apps.api.endpoints.sentry_internal_app_tokens import (
SentryInternalAppTokensEndpoint,
)
+from sentry.tempest.endpoints.tempest_credentials import TempestCredentialsEndpoint
+from sentry.tempest.endpoints.tempest_credentials_details import TempestCredentialsDetailsEndpoint
+from sentry.uptime.endpoints.organiation_uptime_alert_index import (
+ OrganizationUptimeAlertIndexEndpoint,
+)
from sentry.uptime.endpoints.project_uptime_alert_details import ProjectUptimeAlertDetailsEndpoint
from sentry.uptime.endpoints.project_uptime_alert_index import ProjectUptimeAlertIndexEndpoint
from sentry.users.api.endpoints.authenticator_index import AuthenticatorIndexEndpoint
@@ -792,11 +796,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
GroupFirstLastReleaseEndpoint.as_view(),
name=f"{name_prefix}-group-first-last-release",
),
- re_path(
- r"^(?P[^\/]+)/participants/$",
- GroupParticipantsEndpoint.as_view(),
- name=f"{name_prefix}-group-participants",
- ),
re_path(
r"^(?P[^\/]+)/autofix/$",
GroupAutofixEndpoint.as_view(),
@@ -2197,6 +2196,12 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
OrganizationForkEndpoint.as_view(),
name="sentry-api-0-organization-fork",
),
+ # Uptime
+ re_path(
+ r"^(?P[^\/]+)/uptime/$",
+ OrganizationUptimeAlertIndexEndpoint.as_view(),
+ name="sentry-api-0-organization-uptime-alert-index",
+ ),
]
PROJECT_URLS: list[URLPattern | URLResolver] = [
@@ -2787,6 +2792,17 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
ProjectUptimeAlertIndexEndpoint.as_view(),
name="sentry-api-0-project-uptime-alert-index",
),
+ # Tempest
+ re_path(
+ r"^(?P[^\/]+)/(?P[^\/]+)/tempest-credentials/$",
+ TempestCredentialsEndpoint.as_view(),
+ name="sentry-api-0-project-tempest-credentials",
+ ),
+ re_path(
+ r"^(?P[^\/]+)/(?P[^\/]+)/tempest-credentials/(?P\d+)/$",
+ TempestCredentialsDetailsEndpoint.as_view(),
+ name="sentry-api-0-project-tempest-credentials-details",
+ ),
*workflow_urls.urlpatterns,
]
diff --git a/src/sentry/apidocs/examples/tags_examples.py b/src/sentry/apidocs/examples/tags_examples.py
index 08325a83443976..de4cf843ea0dc5 100644
--- a/src/sentry/apidocs/examples/tags_examples.py
+++ b/src/sentry/apidocs/examples/tags_examples.py
@@ -7,17 +7,17 @@
"totalValues": 3,
"topValues": [
{
- "key": "chunkymonkey",
- "name": "Chunky Monkey",
- "value": "chunkymonkey",
+ "key": "strawberry",
+ "name": "Strawberry",
+ "value": "strawberry",
"count": 2,
"lastSeen": "2024-01-01T00:00:00Z",
"firstSeen": "2024-01-01T00:00:00Z",
},
{
- "key": "halfbaked",
- "name": "Half Baked",
- "value": "halfbaked",
+ "key": "vanilla",
+ "name": "Vanilla",
+ "value": "vanilla",
"count": 1,
"lastSeen": "2024-01-01T00:00:00Z",
"firstSeen": "2024-01-01T00:00:00Z",
@@ -25,6 +25,41 @@
],
}
+SIMPLE_TAG_VALUES = [
+ {
+ "key": "strawberry",
+ "name": "Strawberry",
+ "value": "strawberry",
+ "count": 2,
+ "lastSeen": "2024-01-01T00:00:00Z",
+ "firstSeen": "2024-01-01T00:00:00Z",
+ },
+ {
+ "key": "vanilla",
+ "name": "Vanilla",
+ "value": "vanilla",
+ "count": 1,
+ "lastSeen": "2024-01-01T00:00:00Z",
+ "firstSeen": "2024-01-01T00:00:00Z",
+ },
+ {
+ "key": "chocolate",
+ "name": "Chocolate",
+ "value": "chocolate",
+ "count": 1,
+ "lastSeen": "2024-01-01T00:00:00Z",
+ "firstSeen": "2024-01-01T00:00:00Z",
+ },
+ {
+        "key": "neapolitan",
+        "name": "Neapolitan",
+        "value": "neapolitan",
+ "count": 1,
+ "lastSeen": "2024-01-01T00:00:00Z",
+ "firstSeen": "2024-01-01T00:00:00Z",
+ },
+]
+
class TagsExamples:
GROUP_TAGKEY_DETAILS = OpenApiExample(
@@ -33,3 +68,10 @@ class TagsExamples:
response_only=True,
status_codes=["200"],
)
+
+ GROUP_TAGKEY_VALUES = OpenApiExample(
+ "Return all tag values for a specific tag",
+ value=SIMPLE_TAG_VALUES,
+ response_only=True,
+ status_codes=["200"],
+ )
diff --git a/src/sentry/apidocs/parameters.py b/src/sentry/apidocs/parameters.py
index 863f74aa30d05e..2e77189235a0d9 100644
--- a/src/sentry/apidocs/parameters.py
+++ b/src/sentry/apidocs/parameters.py
@@ -284,6 +284,14 @@ class SCIMParams:
class IssueParams:
+ KEY = OpenApiParameter(
+ name="key",
+ location=OpenApiParameter.PATH,
+ type=OpenApiTypes.STR,
+ description="The tag key to look the values up for.",
+ required=True,
+ )
+
ISSUES_OR_GROUPS = OpenApiParameter(
name="var",
location="path",
@@ -299,6 +307,15 @@ class IssueParams:
description="The ID of the issue you'd like to query.",
)
+ SORT = OpenApiParameter(
+ name="sort",
+ location="query",
+ required=False,
+ type=str,
+ description="Sort order of the resulting tag values. Prefix with '-' for descending order. Default is '-id'.",
+ enum=["id", "date", "age", "count"],
+ )
+
class IssueAlertParams:
ISSUE_RULE_ID = OpenApiParameter(
@@ -418,6 +435,13 @@ class UptimeParams:
type=int,
description="The ID of the uptime alert rule you'd like to query.",
)
+ OWNER = OpenApiParameter(
+ name="owner",
+ location="query",
+ required=False,
+ type=str,
+ description="The owner of the uptime alert, in the format `user:id` or `team:id`. May be specified multiple times.",
+ )
class EventParams:
@@ -445,6 +469,41 @@ class EventParams:
description="Index of the exception that should be used for source map resolution.",
)
+ EVENT_ID_EXTENDED = OpenApiParameter(
+ name="event_id",
+ type=OpenApiTypes.STR,
+ location=OpenApiParameter.PATH,
+ description="The ID of the event to retrieve, or 'latest', 'oldest', or 'recommended'.",
+ required=True,
+ enum=["latest", "oldest", "recommended"],
+ )
+
+ FULL_PAYLOAD = OpenApiParameter(
+ name="full",
+ type=OpenApiTypes.BOOL,
+ location=OpenApiParameter.QUERY,
+ description="Specify true to include the full event body, including the stacktrace, in the event payload.",
+ required=False,
+ default=False,
+ )
+
+ SAMPLE = OpenApiParameter(
+ name="sample",
+ type=OpenApiTypes.BOOL,
+ location=OpenApiParameter.QUERY,
+ description="Return events in pseudo-random order. This is deterministic so an identical query will always return the same events in the same order.",
+ required=False,
+ default=False,
+ )
+
+ QUERY = OpenApiParameter(
+ name="query",
+ location=OpenApiParameter.QUERY,
+ type=OpenApiTypes.STR,
+ description="An optional search query for filtering events.",
+ required=False,
+ )
+
class ProjectParams:
FILTER_ID = OpenApiParameter(
diff --git a/src/sentry/auth/services/auth/model.py b/src/sentry/auth/services/auth/model.py
index d0bbd928b60133..82254cc4bad5ad 100644
--- a/src/sentry/auth/services/auth/model.py
+++ b/src/sentry/auth/services/auth/model.py
@@ -22,7 +22,7 @@
class RpcApiKey(RpcModel):
id: int = -1
organization_id: int = -1
- key: str = ""
+ key: str = Field(repr=False, default="")
status: int = 0
allowed_origins: list[str] = Field(default_factory=list)
label: str = ""
@@ -35,8 +35,8 @@ class RpcApiToken(RpcModel):
organization_id: int | None = None
application_id: int | None = None
application_is_active: bool = False
- token: str = ""
- hashed_token: str | None = None
+ token: str = Field(repr=False, default="")
+ hashed_token: str | None = Field(repr=False, default=None)
expires_at: datetime.datetime | None = None
allowed_origins: list[str] = Field(default_factory=list)
scope_list: list[str] = Field(default_factory=list)
diff --git a/src/sentry/auth/superuser.py b/src/sentry/auth/superuser.py
index bdf0921bdcd823..fa48351cad52e6 100644
--- a/src/sentry/auth/superuser.py
+++ b/src/sentry/auth/superuser.py
@@ -155,12 +155,6 @@ class SuperuserAccessFormInvalidJson(SentryAPIException):
message = "The request contains invalid json"
-class EmptySuperuserAccessForm(SentryAPIException):
- status_code = status.HTTP_400_BAD_REQUEST
- code = "empty-superuser-access-form"
- message = "The request contains an empty superuser access form data"
-
-
class Superuser(ElevatedMode):
allowed_ips = frozenset(ipaddress.ip_network(str(v), strict=False) for v in ALLOWED_IPS)
org_id = SUPERUSER_ORG_ID
@@ -456,13 +450,6 @@ def enable_and_log_superuser_access():
tags={"reason": SuperuserAccessFormInvalidJson.code},
)
raise SuperuserAccessFormInvalidJson()
- except AttributeError:
- metrics.incr(
- "superuser.failure",
- sample_rate=1.0,
- tags={"reason": EmptySuperuserAccessForm.code},
- )
- raise EmptySuperuserAccessForm()
su_access_info = SuperuserAccessSerializer(data=su_access_json)
diff --git a/src/sentry/backup/comparators.py b/src/sentry/backup/comparators.py
index 840c3d85a73e53..3d4cfc0eca942a 100644
--- a/src/sentry/backup/comparators.py
+++ b/src/sentry/backup/comparators.py
@@ -903,6 +903,9 @@ def get_default_comparators() -> dict[str, list[JSONScrubbingComparator]]:
"workflow_engine.alertruletriggerdatacondition": [
DateUpdatedComparator("date_updated", "date_added")
],
+ "tempest.tempestcredentials": [
+ DateUpdatedComparator("date_updated", "date_added"),
+ ],
},
)
diff --git a/src/sentry/backup/dependencies.py b/src/sentry/backup/dependencies.py
index 4a7184e1187993..e0cf4a543dd356 100644
--- a/src/sentry/backup/dependencies.py
+++ b/src/sentry/backup/dependencies.py
@@ -95,28 +95,23 @@ def __repr__(self) -> str:
#
# TODO(getsentry/team-ospo#190): We should find a better way to store this information than a magic
# list in this file. We should probably make a field (or method?) on `BaseModel` instead.
-@unique
-class RelocationRootModels(Enum):
- """
- Record the "root" models for a given `RelocationScope`.
- """
-
- Excluded: list[NormalizedModelName] = []
- User = [NormalizedModelName("sentry.user")]
- Organization = [NormalizedModelName("sentry.organization")]
- Config = [
- NormalizedModelName("sentry.controloption"),
- NormalizedModelName("sentry.option"),
- NormalizedModelName("sentry.relay"),
- NormalizedModelName("sentry.relayusage"),
- NormalizedModelName("sentry.userrole"),
- ]
+_ROOT_MODELS: tuple[NormalizedModelName, ...] = (
+ # RelocationScope.User
+ NormalizedModelName("sentry.user"),
+ # RelocationScope.Organization
+ NormalizedModelName("sentry.organization"),
+ # RelocationScope.Config
+ NormalizedModelName("sentry.controloption"),
+ NormalizedModelName("sentry.option"),
+ NormalizedModelName("sentry.relay"),
+ NormalizedModelName("sentry.relayusage"),
+ NormalizedModelName("sentry.userrole"),
+ # RelocationScope.Global
# TODO(getsentry/team-ospo#188): Split out extension scope root models from this list.
- Global = [
- NormalizedModelName("sentry.apiapplication"),
- NormalizedModelName("sentry.integration"),
- NormalizedModelName("sentry.sentryapp"),
- ]
+ NormalizedModelName("sentry.apiapplication"),
+ NormalizedModelName("sentry.integration"),
+ NormalizedModelName("sentry.sentryapp"),
+)
@unique
@@ -433,6 +428,10 @@ def dependencies() -> dict[NormalizedModelName, ModelRelations]:
if model._meta.app_label in {"sessions", "sites", "test", "getsentry"}:
continue
+ # exclude proxy models since the backup test is already done on a parent if needed
+ if model._meta.proxy:
+ continue
+
foreign_keys: dict[str, ForeignField] = dict()
uniques: set[frozenset[str]] = {
frozenset(combo) for combo in model._meta.unique_together
@@ -538,10 +537,7 @@ def dependencies() -> dict[NormalizedModelName, ModelRelations]:
)
# Get a flat list of "root" models, then mark all of them as non-dangling.
- relocation_root_models: list[NormalizedModelName] = []
- for root_models in RelocationRootModels:
- relocation_root_models.extend(root_models.value)
- for model_name in relocation_root_models:
+ for model_name in _ROOT_MODELS:
model_dependencies_dict[model_name].dangling = False
# TODO(getsentry/team-ospo#190): In practice, we can treat `AlertRule`'s dependency on
diff --git a/src/sentry/conf/api_pagination_allowlist_do_not_modify.py b/src/sentry/conf/api_pagination_allowlist_do_not_modify.py
index d3ad9bce77bc6a..ab3353d9b670b1 100644
--- a/src/sentry/conf/api_pagination_allowlist_do_not_modify.py
+++ b/src/sentry/conf/api_pagination_allowlist_do_not_modify.py
@@ -16,7 +16,6 @@
"GitlabIssueSearchEndpoint",
"GroupEventsEndpoint",
"GroupIntegrationsEndpoint",
- "GroupParticipantsEndpoint",
"GroupSimilarIssuesEmbeddingsEndpoint",
"GroupStatsEndpoint",
"GroupTagsEndpoint",
diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py
index a0ea57ef0ad929..0d0ed2a0d40e50 100644
--- a/src/sentry/conf/server.py
+++ b/src/sentry/conf/server.py
@@ -21,12 +21,13 @@
SENTRY_API_PAGINATION_ALLOWLIST_DO_NOT_MODIFY,
)
from sentry.conf.types.celery import SplitQueueSize, SplitQueueTaskRoute
-from sentry.conf.types.kafka_definition import ConsumerDefinition
+from sentry.conf.types.kafka_definition import ConsumerDefinition, Topic
from sentry.conf.types.logging_config import LoggingConfig
from sentry.conf.types.role_dict import RoleDict
from sentry.conf.types.sdk_config import ServerSdkConfig
from sentry.conf.types.sentry_config import SentryMode
from sentry.conf.types.service_options import ServiceOptions
+from sentry.conf.types.uptime import UptimeRegionConfig
from sentry.utils import json # NOQA (used in getsentry config)
from sentry.utils.celery import crontab_with_minute_jitter, make_split_task_queues
from sentry.utils.types import Type, type_from_value
@@ -405,6 +406,7 @@ def env(
"sentry.flags",
"sentry.monitors",
"sentry.uptime",
+ "sentry.tempest",
"sentry.replays",
"sentry.release_health",
"sentry.search",
@@ -835,9 +837,6 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str:
"sentry.integrations.tasks",
)
-# tmp(michal): Default configuration for post_process* queues split
-SENTRY_POST_PROCESS_QUEUE_SPLIT_ROUTER: dict[str, Callable[[], str]] = {}
-
# Enable split queue routing
CELERY_ROUTES = ("sentry.queue.routers.SplitQueueTaskRouter",)
@@ -854,7 +853,14 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str:
"total": 3,
"in_use": 3,
},
- }
+ },
+ "sentry.profiles.task.process_profile": {
+ "default_queue": "profiles.process",
+ "queues_config": {
+ "total": 3,
+ "in_use": 3,
+ },
+ },
}
CELERY_SPLIT_TASK_QUEUES_REGION = make_split_task_queues(CELERY_SPLIT_QUEUE_TASK_ROUTES_REGION)
@@ -945,6 +951,7 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str:
"dynamicsampling",
routing_key="dynamicsampling",
),
+ Queue("tempest", routing_key="tempest"),
Queue("incidents", routing_key="incidents"),
Queue("incident_snapshots", routing_key="incident_snapshots"),
Queue("incidents", routing_key="incidents"),
@@ -1225,8 +1232,8 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str:
},
"weekly-escalating-forecast": {
"task": "sentry.tasks.weekly_escalating_forecast.run_escalating_forecast",
- # Run every 6 hours
- "schedule": crontab(minute="0", hour="*/6"),
+ # Run once a day at 00:00
+ "schedule": crontab(minute="0", hour="0"),
"options": {"expires": 60 * 60 * 3},
},
"schedule_auto_transition_to_ongoing": {
@@ -1332,7 +1339,10 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str:
# The list of modules that workers will import after starting up
# Like celery, taskworkers need to import task modules to make tasks
# accessible to the worker.
-TASKWORKER_IMPORTS: tuple[str, ...] = ()
+TASKWORKER_IMPORTS: tuple[str, ...] = (
+ # Used for tests
+ "sentry.taskworker.tasks.examples",
+)
TASKWORKER_ROUTER: str = "sentry.taskworker.router.DefaultRouter"
TASKWORKER_ROUTES: dict[str, str] = {}
@@ -1461,7 +1471,18 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
"PARSER_WHITELIST": ["rest_framework.parsers.JSONParser"],
"POSTPROCESSING_HOOKS": ["sentry.apidocs.hooks.custom_postprocessing_hook"],
"PREPROCESSING_HOOKS": ["sentry.apidocs.hooks.custom_preprocessing_hook"],
- "SERVERS": [{"url": "https://us.sentry.io"}, {"url": "https://de.sentry.io"}],
+ "SERVERS": [
+ {
+ "url": "https://{region}.sentry.io",
+ "variables": {
+ "region": {
+ "default": "us",
+ "description": "The data storage location for an organization",
+ "enum": ["us", "de"],
+ },
+ },
+ },
+ ],
"SORT_OPERATION_PARAMETERS": custom_parameter_sort,
"TAGS": OPENAPI_TAGS,
"TITLE": "API Reference",
@@ -2232,6 +2253,9 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
# This flag activates uptime checks in the developemnt environment
SENTRY_USE_UPTIME = False
+# This flag activates the taskbroker in devservices
+SENTRY_USE_TASKBROKER = False
+
# SENTRY_DEVSERVICES = {
# "service-name": lambda settings, options: (
# {
@@ -2412,6 +2436,21 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
"platform": "linux/amd64",
}
),
+ "taskbroker": lambda settings, options: (
+ {
+ "image": "ghcr.io/getsentry/taskbroker:latest",
+ "ports": {"50051/tcp": 50051},
+ "environment": {
+ "TASKBROKER_KAFKA_CLUSTER": (
+ "kafka-kafka-1"
+ if os.environ.get("USE_NEW_DEVSERVICES") == "1"
+ else "sentry_kafka"
+ ),
+ },
+ "only_if": settings.SENTRY_USE_TASKBROKER,
+ "platform": "linux/amd64",
+ }
+ ),
"bigtable": lambda settings, options: (
{
"image": "ghcr.io/getsentry/cbtemulator:d28ad6b63e461e8c05084b8c83f1c06627068c04",
@@ -2493,7 +2532,7 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
SENTRY_SELF_HOSTED_ERRORS_ONLY = False
# only referenced in getsentry to provide the stable beacon version
# updated with scripts/bump-version.sh
-SELF_HOSTED_STABLE_VERSION = "24.11.1"
+SELF_HOSTED_STABLE_VERSION = "24.12.1"
# Whether we should look at X-Forwarded-For header or not
# when checking REMOTE_ADDR ip addresses
@@ -2940,6 +2979,7 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
"remote_subscriptions",
"uptime",
"workflow_engine",
+ "tempest",
)
# Where to write the lockfile to.
MIGRATIONS_LOCKFILE_PATH = os.path.join(PROJECT_ROOT, os.path.pardir, os.path.pardir)
@@ -3354,7 +3394,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
"sentry-api-0-group-integrations",
"sentry-api-0-group-integration-details",
"sentry-api-0-group-current-release",
- "sentry-api-0-group-participants",
"sentry-api-0-shared-group-details",
# Unscoped profiling URLs
"sentry-api-0-profiling-project-profile",
@@ -3408,6 +3447,16 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
SIMILARITY_BACKFILL_COHORT_MAP: dict[str, list[int]] = {}
+UPTIME_REGIONS = [
+ UptimeRegionConfig(
+ slug="default",
+ name="Default Region",
+ config_topic=Topic.UPTIME_CONFIGS,
+ enabled=True,
+ ),
+]
+
+
# Devserver configuration overrides.
ngrok_host = os.environ.get("SENTRY_DEVSERVER_NGROK")
if ngrok_host:
diff --git a/src/sentry/conf/types/kafka_definition.py b/src/sentry/conf/types/kafka_definition.py
index 59ae1228343494..f0013025c3de45 100644
--- a/src/sentry/conf/types/kafka_definition.py
+++ b/src/sentry/conf/types/kafka_definition.py
@@ -86,6 +86,8 @@ class ConsumerDefinition(TypedDict, total=False):
dlq_max_invalid_ratio: float | None
dlq_max_consecutive_count: int | None
+ stale_topic: Topic
+
def validate_consumer_definition(consumer_definition: ConsumerDefinition) -> None:
if "dlq_topic" not in consumer_definition and (
diff --git a/src/sentry/conf/types/uptime.py b/src/sentry/conf/types/uptime.py
new file mode 100644
index 00000000000000..2e645848a70ae4
--- /dev/null
+++ b/src/sentry/conf/types/uptime.py
@@ -0,0 +1,15 @@
+import dataclasses
+
+from sentry.conf.types.kafka_definition import Topic
+
+
+@dataclasses.dataclass
+class UptimeRegionConfig:
+ """
+ Defines a region which uptime checks can be run in.
+ """
+
+ slug: str
+ name: str
+ config_topic: Topic
+ enabled: bool
diff --git a/src/sentry/consumers/__init__.py b/src/sentry/consumers/__init__.py
index bb21f85fadb759..cbd369fcc298fc 100644
--- a/src/sentry/consumers/__init__.py
+++ b/src/sentry/consumers/__init__.py
@@ -6,11 +6,10 @@
import click
from arroyo.backends.abstract import Consumer
-from arroyo.backends.kafka import KafkaProducer
from arroyo.backends.kafka.configuration import build_kafka_consumer_configuration
from arroyo.backends.kafka.consumer import KafkaConsumer
from arroyo.commit import ONCE_PER_SECOND
-from arroyo.dlq import DlqLimit, DlqPolicy, KafkaDlqProducer
+from arroyo.dlq import DlqPolicy
from arroyo.processing.processor import StreamProcessor
from arroyo.processing.strategies import Healthcheck
from arroyo.processing.strategies.abstract import ProcessingStrategy, ProcessingStrategyFactory
@@ -22,11 +21,12 @@
Topic,
validate_consumer_definition,
)
+from sentry.consumers.dlq import DlqStaleMessagesStrategyFactoryWrapper, maybe_build_dlq_producer
from sentry.consumers.validate_schema import ValidateSchema
from sentry.eventstream.types import EventStreamEventType
from sentry.ingest.types import ConsumerType
from sentry.utils.imports import import_string
-from sentry.utils.kafka_config import get_kafka_producer_cluster_options, get_topic_definition
+from sentry.utils.kafka_config import get_topic_definition
logger = logging.getLogger(__name__)
@@ -135,6 +135,37 @@ def ingest_monitors_options() -> list[click.Option]:
return options
+def uptime_options() -> list[click.Option]:
+ """Return a list of uptime-results options."""
+ options = [
+ click.Option(
+ ["--mode", "mode"],
+ type=click.Choice(["serial", "parallel"]),
+ default="serial",
+ help="The mode to process results in. Parallel uses multithreading.",
+ ),
+ click.Option(
+ ["--max-batch-size", "max_batch_size"],
+ type=int,
+ default=500,
+ help="Maximum number of results to batch before processing in parallel.",
+ ),
+ click.Option(
+ ["--max-batch-time", "max_batch_time"],
+ type=int,
+ default=1,
+ help="Maximum time spent batching results before processing in parallel.",
+ ),
+ click.Option(
+ ["--max-workers", "max_workers"],
+ type=int,
+ default=None,
+ help="The maximum number of threads to spawn in parallel mode.",
+ ),
+ ]
+ return options
+
+
def ingest_events_options() -> list[click.Option]:
"""
Options for the "events"-like consumers: `events`, `attachments`, `transactions`.
@@ -263,6 +294,7 @@ def ingest_transactions_options() -> list[click.Option]:
"uptime-results": {
"topic": Topic.UPTIME_RESULTS,
"strategy_factory": "sentry.uptime.consumers.results_consumer.UptimeResultsStrategyFactory",
+ "click_options": uptime_options(),
},
"billing-metrics-consumer": {
"topic": Topic.SNUBA_GENERIC_METRICS,
@@ -315,6 +347,7 @@ def ingest_transactions_options() -> list[click.Option]:
"consumer_type": ConsumerType.Events,
},
"dlq_topic": Topic.INGEST_EVENTS_DLQ,
+ "stale_topic": Topic.INGEST_EVENTS_DLQ,
},
"ingest-feedback-events": {
"topic": Topic.INGEST_FEEDBACK_EVENTS,
@@ -339,6 +372,7 @@ def ingest_transactions_options() -> list[click.Option]:
"strategy_factory": "sentry.ingest.consumer.factory.IngestTransactionsStrategyFactory",
"click_options": ingest_transactions_options(),
"dlq_topic": Topic.INGEST_TRANSACTIONS_DLQ,
+ "stale_topic": Topic.INGEST_TRANSACTIONS_DLQ,
},
"ingest-metrics": {
"topic": Topic.INGEST_METRICS,
@@ -348,8 +382,6 @@ def ingest_transactions_options() -> list[click.Option]:
"ingest_profile": "release-health",
},
"dlq_topic": Topic.INGEST_METRICS_DLQ,
- "dlq_max_invalid_ratio": 0.01,
- "dlq_max_consecutive_count": 1000,
},
"ingest-generic-metrics": {
"topic": Topic.INGEST_PERFORMANCE_METRICS,
@@ -359,8 +391,6 @@ def ingest_transactions_options() -> list[click.Option]:
"ingest_profile": "performance",
},
"dlq_topic": Topic.INGEST_GENERIC_METRICS_DLQ,
- "dlq_max_invalid_ratio": None,
- "dlq_max_consecutive_count": None,
},
"generic-metrics-last-seen-updater": {
"topic": Topic.SNUBA_GENERIC_METRICS,
@@ -423,15 +453,6 @@ def ingest_transactions_options() -> list[click.Option]:
}
-def print_deprecation_warning(name, group_id):
- import click
-
- click.echo(
- f"WARNING: Deprecated command, use sentry run consumer {name} "
- f"--consumer-group {group_id} ..."
- )
-
-
def get_stream_processor(
consumer_name: str,
consumer_args: Sequence[str],
@@ -446,6 +467,8 @@ def get_stream_processor(
synchronize_commit_group: str | None = None,
healthcheck_file_path: str | None = None,
enable_dlq: bool = True,
+ # If set, messages above this age will be rerouted to the stale topic if one is configured
+ stale_threshold_sec: int | None = None,
enforce_schema: bool = False,
group_instance_id: str | None = None,
) -> StreamProcessor:
@@ -555,37 +578,35 @@ def build_consumer_config(group_id: str):
consumer_topic.value, enforce_schema, strategy_factory
)
+ if stale_threshold_sec:
+ strategy_factory = DlqStaleMessagesStrategyFactoryWrapper(
+ stale_threshold_sec, strategy_factory
+ )
+
if healthcheck_file_path is not None:
strategy_factory = HealthcheckStrategyFactoryWrapper(
healthcheck_file_path, strategy_factory
)
if enable_dlq and consumer_definition.get("dlq_topic"):
- try:
- dlq_topic = consumer_definition["dlq_topic"]
- except KeyError as e:
- raise click.BadParameter(
- f"Cannot enable DLQ for consumer: {consumer_name}, no DLQ topic has been defined for it"
- ) from e
- try:
- dlq_topic_defn = get_topic_definition(dlq_topic)
- cluster_setting = dlq_topic_defn["cluster"]
- except ValueError as e:
- raise click.BadParameter(
- f"Cannot enable DLQ for consumer: {consumer_name}, DLQ topic {dlq_topic} is not configured in this environment"
- ) from e
+ dlq_topic = consumer_definition["dlq_topic"]
+ else:
+ dlq_topic = None
- producer_config = get_kafka_producer_cluster_options(cluster_setting)
- dlq_producer = KafkaProducer(producer_config)
+ if stale_threshold_sec and consumer_definition.get("stale_topic"):
+ stale_topic = consumer_definition["stale_topic"]
+ else:
+ stale_topic = None
+ dlq_producer = maybe_build_dlq_producer(dlq_topic=dlq_topic, stale_topic=stale_topic)
+
+ if dlq_producer:
dlq_policy = DlqPolicy(
- KafkaDlqProducer(dlq_producer, ArroyoTopic(dlq_topic_defn["real_topic_name"])),
- DlqLimit(
- max_invalid_ratio=consumer_definition.get("dlq_max_invalid_ratio"),
- max_consecutive_count=consumer_definition.get("dlq_max_consecutive_count"),
- ),
+ dlq_producer,
+ None,
None,
)
+
else:
dlq_policy = None
diff --git a/src/sentry/consumers/dlq.py b/src/sentry/consumers/dlq.py
new file mode 100644
index 00000000000000..4e8ea5f7939d25
--- /dev/null
+++ b/src/sentry/consumers/dlq.py
@@ -0,0 +1,163 @@
+import logging
+import time
+from collections.abc import Mapping, MutableMapping
+from concurrent.futures import Future
+from datetime import datetime, timedelta, timezone
+from enum import Enum
+
+from arroyo.backends.kafka import KafkaPayload, KafkaProducer
+from arroyo.dlq import InvalidMessage, KafkaDlqProducer
+from arroyo.processing.strategies.abstract import ProcessingStrategy, ProcessingStrategyFactory
+from arroyo.types import FILTERED_PAYLOAD, BrokerValue, Commit, FilteredPayload, Message, Partition
+from arroyo.types import Topic as ArroyoTopic
+from arroyo.types import Value
+
+from sentry.conf.types.kafka_definition import Topic
+from sentry.utils.kafka_config import get_kafka_producer_cluster_options, get_topic_definition
+
+logger = logging.getLogger(__name__)
+
+
+class RejectReason(Enum):
+ STALE = "stale"
+ INVALID = "invalid"
+
+
+class MultipleDestinationDlqProducer(KafkaDlqProducer):
+ """
+ Produces to either the DLQ or stale message topic depending on the reason.
+ """
+
+ def __init__(
+ self,
+ producers: Mapping[RejectReason, KafkaDlqProducer | None],
+ ) -> None:
+ self.producers = producers
+
+ def produce(
+ self,
+ value: BrokerValue[KafkaPayload],
+ reason: str | None = None,
+ ) -> Future[BrokerValue[KafkaPayload]]:
+
+ reject_reason = RejectReason(reason) if reason else RejectReason.INVALID
+ producer = self.producers.get(reject_reason)
+
+ if producer:
+ return producer.produce(value)
+ else:
+ # No DLQ producer configured for the reason.
+ logger.error("No DLQ producer configured for reason %s", reason)
+ future: Future[BrokerValue[KafkaPayload]] = Future()
+ future.set_running_or_notify_cancel()
+ future.set_result(value)
+ return future
+
+
+def _get_dlq_producer(topic: Topic | None) -> KafkaDlqProducer | None:
+ if topic is None:
+ return None
+
+ topic_defn = get_topic_definition(topic)
+ config = get_kafka_producer_cluster_options(topic_defn["cluster"])
+ real_topic = topic_defn["real_topic_name"]
+ return KafkaDlqProducer(KafkaProducer(config), ArroyoTopic(real_topic))
+
+
+def maybe_build_dlq_producer(
+ dlq_topic: Topic | None,
+ stale_topic: Topic | None,
+) -> MultipleDestinationDlqProducer | None:
+ if dlq_topic is None and stale_topic is None:
+ return None
+
+ producers = {
+ RejectReason.INVALID: _get_dlq_producer(dlq_topic),
+ RejectReason.STALE: _get_dlq_producer(stale_topic),
+ }
+
+ return MultipleDestinationDlqProducer(producers)
+
+
+class DlqStaleMessages(ProcessingStrategy[KafkaPayload]):
+ def __init__(
+ self,
+ stale_threshold_sec: int,
+ next_step: ProcessingStrategy[KafkaPayload | FilteredPayload],
+ ) -> None:
+ self.stale_threshold_sec = stale_threshold_sec
+ self.next_step = next_step
+
+ # A filtered message is created so we commit periodically if all are stale.
+ self.last_forwarded_offsets = time.time()
+ self.offsets_to_forward: MutableMapping[Partition, int] = {}
+
+ def submit(self, message: Message[KafkaPayload]) -> None:
+ min_accepted_timestamp = datetime.now(timezone.utc) - timedelta(
+ seconds=self.stale_threshold_sec
+ )
+
+ if isinstance(message.value, BrokerValue):
+ # Normalize the message timezone to be UTC
+ if message.value.timestamp.tzinfo is None:
+ message_timestamp = message.value.timestamp.replace(tzinfo=timezone.utc)
+ else:
+ message_timestamp = message.value.timestamp
+
+ if message_timestamp < min_accepted_timestamp:
+ self.offsets_to_forward[message.value.partition] = message.value.next_offset
+ raise InvalidMessage(
+ message.value.partition,
+ message.value.offset,
+ reason=RejectReason.STALE.value,
+ log_exception=False,
+ )
+
+ # If we get a valid message for a partition later, don't emit a filtered message for it
+ if self.offsets_to_forward:
+ for partition in message.committable:
+ self.offsets_to_forward.pop(partition, None)
+
+ self.next_step.submit(message)
+
+ def poll(self) -> None:
+ self.next_step.poll()
+
+ # Ensure we commit frequently even if all messages are invalid
+ if self.offsets_to_forward:
+ if time.time() > self.last_forwarded_offsets + 1:
+ filtered_message = Message(Value(FILTERED_PAYLOAD, self.offsets_to_forward))
+ self.next_step.submit(filtered_message)
+ self.offsets_to_forward = {}
+ self.last_forwarded_offsets = time.time()
+
+ def join(self, timeout: float | None = None) -> None:
+ self.next_step.join(timeout)
+
+ def close(self) -> None:
+ self.next_step.close()
+
+ def terminate(self) -> None:
+ self.next_step.terminate()
+
+
+class DlqStaleMessagesStrategyFactoryWrapper(ProcessingStrategyFactory[KafkaPayload]):
+ """
+ Wrapper used to dlq a message with a stale timestamp before it is passed to
+ the rest of the pipeline. The InvalidMessage is raised with a
+ "stale" reason so it can be routed to a separate stale topic.
+ """
+
+ def __init__(
+ self,
+ stale_threshold_sec: int,
+ inner: ProcessingStrategyFactory[KafkaPayload | FilteredPayload],
+ ) -> None:
+ self.stale_threshold_sec = stale_threshold_sec
+ self.inner = inner
+
+ def create_with_partitions(
+ self, commit: Commit, partitions: Mapping[Partition, int]
+ ) -> ProcessingStrategy[KafkaPayload]:
+ rv = self.inner.create_with_partitions(commit, partitions)
+ return DlqStaleMessages(self.stale_threshold_sec, rv)
diff --git a/src/sentry/data_secrecy/api/waive_data_secrecy.py b/src/sentry/data_secrecy/api/waive_data_secrecy.py
index 5f59ba863423cf..995f3337fa5934 100644
--- a/src/sentry/data_secrecy/api/waive_data_secrecy.py
+++ b/src/sentry/data_secrecy/api/waive_data_secrecy.py
@@ -150,6 +150,5 @@ def delete(self, request: Request, organization: Organization):
event=audit_log.get_event_id("DATA_SECRECY_REINSTATED"),
)
return Response(
- {"detail": "Data secrecy has been reinstated."},
status=status.HTTP_204_NO_CONTENT,
)
diff --git a/src/sentry/debug/utils/function_wrapper.py b/src/sentry/debug/utils/function_wrapper.py
deleted file mode 100644
index 433373faf9cfbf..00000000000000
--- a/src/sentry/debug/utils/function_wrapper.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from time import time
-
-
-class FunctionWrapper:
- def __init__(self, collector):
- self.collector = collector
-
- def __call__(self, func, *args, **kwargs):
- __traceback_hide__ = True # NOQA
-
- start = time()
- try:
- return func(*args, **kwargs)
- finally:
- end = time()
-
- if getattr(func, "im_class", None):
- arg_str = repr(args[1:])
- else:
- arg_str = repr(args)
-
- data = {
- "name": func.__name__,
- "args": arg_str,
- "kwargs": repr(kwargs),
- "start": start,
- "end": end,
- }
- self.record(data)
-
- def record(self, data):
- self.collector.append(data)
diff --git a/src/sentry/dynamic_sampling/tasks/custom_rule_notifications.py b/src/sentry/dynamic_sampling/tasks/custom_rule_notifications.py
index b4e7c79c37608c..c7de2adb66c0f0 100644
--- a/src/sentry/dynamic_sampling/tasks/custom_rule_notifications.py
+++ b/src/sentry/dynamic_sampling/tasks/custom_rule_notifications.py
@@ -167,6 +167,8 @@ def create_discover_link(rule: CustomDynamicSamplingRule, projects: list[int]) -
q["utc"] = "true"
q["yAxis"] = "count()"
q["sort"] = "-timestamp"
+ q["queryDataset"] = "transaction-like"
+ q["dataset"] = "transactions"
query_string = q.urlencode()
discover_url = rule.organization.absolute_url(
diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py
index bbb6d1dd9b3a07..790433a11f895a 100644
--- a/src/sentry/event_manager.py
+++ b/src/sentry/event_manager.py
@@ -185,10 +185,6 @@ def get_tag(data: dict[str, Any], key: str) -> Any | None:
return None
-def is_sample_event(job):
- return get_tag(job["data"], "sample_event") == "yes"
-
-
def sdk_metadata_from_event(event: Event) -> Mapping[str, Any]:
"""
Returns a metadata dictionary with "sdk" field populated, including a normalized name
@@ -350,7 +346,7 @@ def __init__(
grouping_config = config.get("grouping_config")
# if we still don't have a grouping also try the project
if grouping_config is None and project is not None:
- grouping_config = get_grouping_config_dict_for_project(self._project)
+ grouping_config = get_grouping_config_dict_for_project(project)
self._grouping_config = grouping_config
self._client_ip = client_ip
self._user_agent = user_agent
@@ -2380,7 +2376,7 @@ def save_attachment(
return
from sentry import ratelimits as ratelimiter
- is_limited, num_requests, reset_time = ratelimiter.backend.is_limited_with_value(
+ is_limited, _, _ = ratelimiter.backend.is_limited_with_value(
key="event_attachment.save_per_sec",
limit=options.get("sentry.save-event-attachments.project-per-sec-limit"),
project=project,
@@ -2388,7 +2384,7 @@ def save_attachment(
)
rate_limit_tag = "per_sec"
if not is_limited:
- is_limited, num_requests, reset_time = ratelimiter.backend.is_limited_with_value(
+ is_limited, _, _ = ratelimiter.backend.is_limited_with_value(
key="event_attachment.save_5_min",
limit=options.get("sentry.save-event-attachments.project-per-5-minute-limit"),
project=project,
diff --git a/src/sentry/eventstore/base.py b/src/sentry/eventstore/base.py
index e645cade39e658..f1a375f1cc80cb 100644
--- a/src/sentry/eventstore/base.py
+++ b/src/sentry/eventstore/base.py
@@ -292,7 +292,7 @@ def get_adjacent_event_ids(self, event, filter):
"""
raise NotImplementedError
- def create_event(self, project_id=None, event_id=None, group_id=None, data=None):
+ def create_event(self, *, project_id: int, event_id=None, group_id=None, data=None):
"""
Returns an Event from processed data
"""
diff --git a/src/sentry/eventstore/models.py b/src/sentry/eventstore/models.py
index 67a49708ce2a03..67c912449ef825 100644
--- a/src/sentry/eventstore/models.py
+++ b/src/sentry/eventstore/models.py
@@ -294,10 +294,7 @@ def project(self) -> Project:
@project.setter
def project(self, project: Project) -> None:
- if project is None:
- self.project_id = None
- else:
- self.project_id = project.id
+ self.project_id = project.id
self._project_cache = project
@cached_property
@@ -339,14 +336,23 @@ def get_hashes_and_variants(
"""
variants = self.get_grouping_variants(config)
+ hashes_by_variant = {
+ variant_name: variant.get_hash() for variant_name, variant in variants.items()
+ }
+
# Sort the variants so that the system variant (if any) is always last, in order to resolve
# ambiguities when choosing primary_hash for Snuba
- sorted_variants = sorted(
- variants.items(),
- key=lambda name_and_variant: 1 if name_and_variant[0] == "system" else 0,
+ sorted_variant_names = sorted(
+ variants,
+ key=lambda variant_name: 1 if variant_name == "system" else 0,
)
+
# Get each variant's hash value, filtering out Nones
- hashes = list({variant.get_hash() for _, variant in sorted_variants} - {None})
+ hashes = [
+ hashes_by_variant[variant_name]
+ for variant_name in sorted_variant_names
+ if hashes_by_variant[variant_name] is not None
+ ]
# Write to event before returning
self.data["hashes"] = hashes
diff --git a/src/sentry/eventstream/kafka/backend.py b/src/sentry/eventstream/kafka/backend.py
index 02954e35ce7588..e9b6e8bda5b1e5 100644
--- a/src/sentry/eventstream/kafka/backend.py
+++ b/src/sentry/eventstream/kafka/backend.py
@@ -128,17 +128,6 @@ def insert(
) -> None:
event_type = self._get_event_type(event)
- if event.get_tag("sample_event"):
- logger.info(
- "insert: inserting event in KafkaEventStream",
- extra={
- "event.id": event.event_id,
- "project_id": event.project_id,
- "sample_event": True,
- "event_type": event_type.value,
- },
- )
-
assign_partitions_randomly = (
(event_type == EventStreamEventType.Generic)
or (event_type == EventStreamEventType.Transaction)
@@ -152,14 +141,6 @@ def insert(
kwargs[KW_SKIP_SEMANTIC_PARTITIONING] = True
if event.get_tag("sample_event"):
- logger.info(
- "insert: inserting event in SnubaProtocolEventStream",
- extra={
- "event.id": event.event_id,
- "project_id": event.project_id,
- "sample_event": True,
- },
- )
kwargs["asynchronous"] = False
super().insert(
diff --git a/src/sentry/eventstream/snuba.py b/src/sentry/eventstream/snuba.py
index bdc1835724c751..c3aa891cbf868b 100644
--- a/src/sentry/eventstream/snuba.py
+++ b/src/sentry/eventstream/snuba.py
@@ -112,15 +112,6 @@ def insert(
eventstream_type: str | None = None,
**kwargs: Any,
) -> None:
- if event.get_tag("sample_event") == "true":
- logger.info(
- "insert: attempting to insert event in SnubaProtocolEventStream",
- extra={
- "event.id": event.event_id,
- "project_id": event.project_id,
- "sample_event": True,
- },
- )
if isinstance(event, GroupEvent) and not event.occurrence:
logger.error(
"`GroupEvent` passed to `EventStream.insert`. `GroupEvent` may only be passed when "
diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py
index 8f688d52cd6f68..8e3241aa02668e 100644
--- a/src/sentry/features/temporary.py
+++ b/src/sentry/features/temporary.py
@@ -61,8 +61,6 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:anomaly-detection-alerts-charts", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enable anr frame analysis
manager.add("organizations:anr-analyze-frames", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
- # Enable auth provider configuration through api
- manager.add("organizations:api-auth-provider", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
manager.add("organizations:api-keys", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, default=False, api_expose=True)
# Rollout of the new API rate limits for organization events
manager.add("organizations:api-organization_events-rate-limit-reduced-rollout", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
@@ -111,8 +109,6 @@ def register_temporary_features(manager: FeatureManager):
# Enable the dev toolbar PoC code for employees
# Data Secrecy
manager.add("organizations:data-secrecy", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
- # Enable default metric alerts for new projects
- manager.add("organizations:default-metric-alerts-new-projects", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
manager.add("organizations:devtoolbar", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, default=False, api_expose=True)
manager.add("organizations:email-performance-regression-image", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False)
# Enables automatically deriving of code mappings
@@ -147,8 +143,6 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:increased-issue-owners-rate-limit", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Starfish: extract metrics from the spans
manager.add("organizations:indexed-spans-extraction", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
- # Enable custom alert priorities for Pagerduty and Opsgenie
- manager.add("organizations:integrations-custom-alert-priorities", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable integration functionality to work deployment integrations like Vercel
manager.add("organizations:integrations-deployment", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, default=True, api_expose=True)
manager.add("organizations:integrations-feature-flag-integration", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
@@ -193,14 +187,12 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:issue-stream-search-query-builder", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable issue stream table layout changes
manager.add("organizations:issue-stream-table-layout", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
+ # When enabled, uses the functional issue stream component
+ manager.add("organizations:issue-stream-functional-refactor", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
manager.add("organizations:large-debug-files", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
manager.add("organizations:metric-issue-poc", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
- # Enable members to invite teammates to organizations
- manager.add("organizations:members-invite-teammates", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
manager.add("organizations:mep-rollout-flag", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
manager.add("organizations:mep-use-default-tags", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
- # Enable messaging-integration onboarding when creating a new project
- manager.add("organizations:messaging-integration-onboarding-project-creation", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable threshold period in metric alert rule builder
manager.add("organizations:metric-alert-threshold-period", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Migrate Orgs to new Azure DevOps Integration
@@ -327,6 +319,8 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:performance-use-metrics", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=True)
# Enable showing INP web vital in default views
manager.add("organizations:performance-vitals-inp", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
+ # Enable handling missing webvitals in performance score
+ manager.add("organizations:performance-vitals-handle-missing-webvitals", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable profiling
manager.add("organizations:profiling", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=True)
# Enabled for those orgs who participated in the profiling Beta program
@@ -360,10 +354,6 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:release-comparison-performance", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# enable new release set_commits functionality
manager.add("organizations:set-commits-updated", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
- # Enable new release UI
- manager.add("organizations:releases-v2", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
- manager.add("organizations:releases-v2-internal", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
- manager.add("organizations:releases-v2-st", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Enable playing replays from the replay tab
manager.add("organizations:replay-play-from-replay-tab", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable version 2 of reprocessing (completely distinct from v1)
@@ -424,6 +414,8 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:session-replay-ui", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, default=True, api_expose=True)
# Enable replay web vital breadcrumbs
manager.add("organizations:session-replay-web-vitals", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, default=False, api_expose=True)
+ # Enable GA banner for mobile replay beta orgs about the grace period that will last 2 months. Flag can be removed after March 7th 2025.
+ manager.add("organizations:mobile-replay-beta-orgs", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, default=False, api_expose=False)
# Enable Dev Toolbar frontend features (ex project settings page)
manager.add("organizations:dev-toolbar-ui", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, default=False, api_expose=True)
# Lets organizations manage grouping configs
@@ -433,8 +425,6 @@ def register_temporary_features(manager: FeatureManager):
# Add regression chart as image to slack message
manager.add("organizations:slack-endpoint-regression-image", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False)
manager.add("organizations:slack-function-regression-image", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False)
- # Enable linking to Slack alerts from multiple teams to a single channel
- manager.add("organizations:slack-multiple-team-single-channel-linking", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
manager.add("organizations:stacktrace-processing-caching", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Enable SAML2 Single-logout
manager.add("organizations:sso-saml2-slo", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False)
@@ -456,12 +446,14 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:insights-related-issues-table", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable access to Mobile Screens insights module
manager.add("organizations:insights-mobile-screens-module", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
+ # Enable access to insights crons view (moved from crons sidebar)
+ manager.add("organizations:insights-crons", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
+ # Enable access to insights uptime view
+ manager.add("organizations:insights-uptime", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable standalone span ingestion
manager.add("organizations:standalone-span-ingestion", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Enable the aggregate span waterfall view
manager.add("organizations:starfish-aggregate-span-waterfall", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
- # Enable bundle analysis ui and endpoint
- manager.add("organizations:starfish-browser-resource-module-bundle-analysis", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enables the resource module ui
manager.add("organizations:starfish-browser-resource-module-image-view", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enables the resource module ui
@@ -535,14 +527,16 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:visibility-explore-view", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable the dataset toggle on the new explore page
manager.add("organizations:visibility-explore-dataset", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
- # Enable minimap in the widget viewer modal in dashboards
- manager.add("organizations:widget-viewer-modal-minimap", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
+ # Enable RPC on the new explore page
+ manager.add("organizations:visibility-explore-rpc", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enabled unresolved issue webhook for organization
manager.add("organizations:webhooks-unresolved", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=True)
# Enable dual writing for metric alert issues (see: alerts create issues)
- manager.add("organizations:workflow-engine-m3-dual-write", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
+ manager.add("organizations:workflow-engine-metric-alert-dual-write", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
+ # Enable Processing for Metric Alerts in the workflow_engine
+ manager.add("organizations:workflow-engine-metric-alert-processing", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enable reading from new ACI tables for metric alert issues (see: alerts create issues)
- manager.add("organizations:workflow-engine-m3-read", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
+ manager.add("organizations:workflow-engine-metric-alert-read", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enable new workflow_engine UI (see: alerts create issues)
manager.add("organizations:workflow-engine-ui", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable EventUniqueUserFrequencyConditionWithConditions special alert condition
@@ -570,16 +564,12 @@ def register_temporary_features(manager: FeatureManager):
manager.add("projects:first-event-severity-calculation", ProjectFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Enable escalation detection for new issues
manager.add("projects:first-event-severity-new-escalation", ProjectFeature, FeatureHandlerStrategy.INTERNAL, default=True, api_expose=False)
- # Enable functionality for attaching minidumps to events and displaying
- # them in the group UI.
- manager.add("projects:minidump", ProjectFeature, FeatureHandlerStrategy.INTERNAL, default=True, api_expose=True)
# Enable alternative version of group creation that is supposed to be less racy.
manager.add("projects:race-free-group-creation", ProjectFeature, FeatureHandlerStrategy.INTERNAL, default=True, api_expose=False)
# Enable similarity embeddings API call
# This feature is only available on the frontend using project details since the handler gets
# project options and this is slow in the project index endpoint feature flag serialization
manager.add("projects:similarity-embeddings", ProjectFeature, FeatureHandlerStrategy.INTERNAL, default=False, api_expose=True)
- manager.add("projects:similarity-embeddings-backfill", ProjectFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False)
manager.add("projects:similarity-embeddings-delete-by-hash", ProjectFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False)
manager.add("projects:similarity-indexing", ProjectFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
manager.add("projects:similarity-view", ProjectFeature, FeatureHandlerStrategy.INTERNAL, api_expose=True)
@@ -623,3 +613,11 @@ def register_temporary_features(manager: FeatureManager):
FeatureHandlerStrategy.FLAGPOLE,
api_expose=False,
)
+
+ # Controls access to tempest features
+ manager.add(
+ "organizations:tempest-access",
+ OrganizationFeature,
+ FeatureHandlerStrategy.FLAGPOLE,
+ api_expose=True,
+ )
diff --git a/src/sentry/feedback/usecases/create_feedback.py b/src/sentry/feedback/usecases/create_feedback.py
index 3a9fb3b267c475..b1cfc6ffb34ba7 100644
--- a/src/sentry/feedback/usecases/create_feedback.py
+++ b/src/sentry/feedback/usecases/create_feedback.py
@@ -230,7 +230,6 @@ def create_feedback_issue(event, project_id: int, source: FeedbackCreationSource
"feedback.create_feedback_issue.entered",
tags={
"referrer": source.value,
- "client_source": get_path(event, "contexts", "feedback", "source"),
},
)
@@ -257,7 +256,6 @@ def create_feedback_issue(event, project_id: int, source: FeedbackCreationSource
tags={
"is_spam": is_message_spam,
"referrer": source.value,
- "client_source": event["contexts"]["feedback"].get("source"),
},
sample_rate=1.0,
)
@@ -345,7 +343,6 @@ def create_feedback_issue(event, project_id: int, source: FeedbackCreationSource
"feedback.create_feedback_issue.produced_occurrence",
tags={
"referrer": source.value,
- "client_source": event["contexts"]["feedback"].get("source"),
},
sample_rate=1.0,
)
diff --git a/src/sentry/feedback/usecases/spam_detection.py b/src/sentry/feedback/usecases/spam_detection.py
index e567d130fdece1..40f88332a015fc 100644
--- a/src/sentry/feedback/usecases/spam_detection.py
+++ b/src/sentry/feedback/usecases/spam_detection.py
@@ -34,8 +34,8 @@ def make_input_prompt(message: str):
@metrics.wraps("feedback.spam_detection", sample_rate=1.0)
def is_spam(message: str):
- is_spam = False
- trimmed_response = ""
+ labeled_spam = False
+ _trimmed_response = ""
response = complete_prompt(
usecase=LLMUseCase.SPAM_DETECTION,
message=make_input_prompt(message),
@@ -43,18 +43,9 @@ def is_spam(message: str):
max_output_tokens=20,
)
if response:
- is_spam, trimmed_response = trim_response(response)
-
- logger.info(
- "Spam detection",
- extra={
- "feedback_message": message,
- "is_spam": is_spam,
- "response": response,
- "trimmed_response": trimmed_response,
- },
- )
- return is_spam
+ labeled_spam, _trimmed_response = trim_response(response)
+
+ return labeled_spam
def trim_response(text):
diff --git a/src/sentry/flags/docs/api.md b/src/sentry/flags/docs/api.md
index 5e63588b40142c..11a8313b8328ca 100644
--- a/src/sentry/flags/docs/api.md
+++ b/src/sentry/flags/docs/api.md
@@ -147,7 +147,54 @@ Delete a signing secret.
## Webhooks [/organizations//flags/hooks/provider//]
-### Create Flag Log [POST]
+### Create Generic Flag Log [POST]
+
+A flag log event must be emitted after every flag definition change which influences a flag's evaluation. Updates to a flag that do not change a flag's evaluation logic do not need to be emitted to this endpoint. We are only concerned with changes which could have influenced behavior.
+
+Sentry does not currently have a concept of disambiguating flag changes by project or environment. Everything is done at the organization level. Flag changes that are duplicated across projects, environments, or other groupings within the provider must be de-duplicated. To support this, the posted payload sets a "change_id" field for idempotency. In the presence of duplicate ids, only one audit-log record is written in Sentry.
+
+**Data Attributes**
+
+| Column | Type | Description |
+| --------------- | ------ | -------------------------------------------------------------- |
+| action | string | Enum of `created`, `updated`, or `deleted`. |
+| change_id | number | A 64-bit idempotency token representing a unique change group. |
+| created_at | string | ISO-8601 formatted UTC date time: YYYY-MM-DDTHH:MM:SS+00:00. |
+| created_by | object | Created-by object. |
+| created_by.id | string | Identifier of the user who made the change. |
+| created_by.type | string | Enum of `email`, `id`, or `name`. |
+| flag | string | The name of the flag changed. |
+
+**Meta Attributes**
+
+| Column | Type | Description |
+| ------- | ---- | --------------------- |
+| version | int | The protocol version. |
+
+- Request (application/json)
+
+ ```json
+ {
+ "data": [
+ {
+ "action": "created", "change_id": 123456789,
+ "created_at": "2024-12-12T00:02:00+00:00",
+ "created_by": {
+ "id": "first.last@company.com",
+ "type": "email"
+ },
+ "flag": "hello.world"
+ }
+ ],
+ "meta": {
+ "version": 1
+ }
+ }
+ ```
+
+- Response 201
+
+### Create Provider-Specific Flag Log [POST]
The shape of the request object varies by provider. The `` URI parameter informs the server of the shape of the request and it is on the server to handle the provider. The following providers are supported: LaunchDarkly.
diff --git a/src/sentry/flags/endpoints/__init__.py b/src/sentry/flags/endpoints/__init__.py
index be4cef59b09284..e69de29bb2d1d6 100644
--- a/src/sentry/flags/endpoints/__init__.py
+++ b/src/sentry/flags/endpoints/__init__.py
@@ -1,12 +0,0 @@
-from sentry.api.bases.organization import OrganizationEndpoint
-from sentry.api.exceptions import ResourceDoesNotExist
-
-VALID_PROVIDERS = {"launchdarkly"}
-
-
-class OrganizationFlagsEndpoint(OrganizationEndpoint):
-
- def convert_args(self, *args, **kwargs):
- if kwargs.get("provider", "") not in VALID_PROVIDERS:
- raise ResourceDoesNotExist
- return super().convert_args(*args, **kwargs)
diff --git a/src/sentry/flags/endpoints/hooks.py b/src/sentry/flags/endpoints/hooks.py
index e223f602df2363..b948a88ec7ec5b 100644
--- a/src/sentry/flags/endpoints/hooks.py
+++ b/src/sentry/flags/endpoints/hooks.py
@@ -6,20 +6,14 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
+from sentry.api.bases.organization import OrganizationEndpoint
from sentry.api.exceptions import ResourceDoesNotExist
-from sentry.flags.endpoints import OrganizationFlagsEndpoint
-from sentry.flags.providers import (
- DeserializationError,
- InvalidProvider,
- handle_provider_event,
- validate_provider_event,
- write,
-)
+from sentry.flags.providers import DeserializationError, get_provider, write
from sentry.models.organization import Organization
@region_silo_endpoint
-class OrganizationFlagsHooksEndpoint(OrganizationFlagsEndpoint):
+class OrganizationFlagsHooksEndpoint(OrganizationEndpoint):
authentication_classes = ()
owner = ApiOwner.REPLAY
permission_classes = ()
@@ -32,18 +26,14 @@ def post(self, request: Request, organization: Organization, provider: str) -> R
return Response("Not enabled.", status=404)
try:
- if not validate_provider_event(
- provider,
- request.body,
- request.headers,
- organization.id,
- ):
+ provider_cls = get_provider(organization.id, provider, request.headers)
+ if provider_cls is None:
+ raise ResourceDoesNotExist
+ elif not provider_cls.validate(request.body):
return Response("Not authorized.", status=401)
-
- write(handle_provider_event(provider, request.data, organization.id))
- return Response(status=200)
- except InvalidProvider:
- raise ResourceDoesNotExist
+ else:
+ write(provider_cls.handle(request.data))
+ return Response(status=200)
except DeserializationError as exc:
sentry_sdk.capture_exception()
return Response(exc.errors, status=200)
diff --git a/src/sentry/flags/endpoints/secrets.py b/src/sentry/flags/endpoints/secrets.py
index 17a03d98b3a045..d572c9d5efb805 100644
--- a/src/sentry/flags/endpoints/secrets.py
+++ b/src/sentry/flags/endpoints/secrets.py
@@ -11,7 +11,10 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
-from sentry.api.bases.organization import OrganizationEndpoint, OrgAuthTokenPermission
+from sentry.api.bases.organization import (
+ OrganizationEndpoint,
+ OrganizationFlagWebHookSigningSecretPermission,
+)
from sentry.api.paginator import OffsetPaginator
from sentry.api.serializers import Serializer, register, serialize
from sentry.flags.models import FlagWebHookSigningSecretModel
@@ -39,14 +42,16 @@ def serialize(self, obj, attrs, user, **kwargs) -> FlagWebhookSigningSecretRespo
class FlagWebhookSigningSecretValidator(serializers.Serializer):
- provider = serializers.ChoiceField(choices=[("launchdarkly", "launchdarkly")], required=True)
+ provider = serializers.ChoiceField(
+ choices=["launchdarkly", "generic", "unleash"], required=True
+ )
secret = serializers.CharField(required=True, max_length=32, min_length=32)
@region_silo_endpoint
class OrganizationFlagsWebHookSigningSecretsEndpoint(OrganizationEndpoint):
owner = ApiOwner.REPLAY
- permission_classes = (OrgAuthTokenPermission,)
+ permission_classes = (OrganizationFlagWebHookSigningSecretPermission,)
publish_status = {
"GET": ApiPublishStatus.PRIVATE,
"POST": ApiPublishStatus.PRIVATE,
@@ -95,7 +100,7 @@ def post(self, request: Request, organization: Organization) -> Response:
@region_silo_endpoint
class OrganizationFlagsWebHookSigningSecretEndpoint(OrganizationEndpoint):
owner = ApiOwner.REPLAY
- permission_classes = (OrgAuthTokenPermission,)
+ permission_classes = (OrganizationFlagWebHookSigningSecretPermission,)
publish_status = {"DELETE": ApiPublishStatus.PRIVATE}
def delete(
diff --git a/src/sentry/flags/providers.py b/src/sentry/flags/providers.py
index 8232a6cf00be58..6b7e8077df4c36 100644
--- a/src/sentry/flags/providers.py
+++ b/src/sentry/flags/providers.py
@@ -1,10 +1,12 @@
import datetime
import hashlib
import hmac
-from typing import Any, TypedDict
+from collections.abc import Callable, Iterator
+from typing import Any, Protocol, TypedDict, TypeVar
from django.http.request import HttpHeaders
from rest_framework import serializers
+from rest_framework.exceptions import ValidationError
from sentry.flags.models import (
ACTION_MAP,
@@ -32,6 +34,8 @@ def write(rows: list["FlagAuditLogRow"]) -> None:
the underlying systems.
"""
+T = TypeVar("T", contravariant=True)
+
class FlagAuditLogRow(TypedDict):
"""A complete flag audit log row instance."""
@@ -45,6 +49,16 @@ class FlagAuditLogRow(TypedDict):
tags: dict[str, Any]
+class ProviderProtocol(Protocol[T]):
+ organization_id: int
+ provider_name: str
+ signature: str | None
+
+ def __init__(self, organization_id: int, signature: str | None) -> None: ...
+ def handle(self, message: T) -> list[FlagAuditLogRow]: ...
+ def validate(self, message_bytes: bytes) -> bool: ...
+
+
class DeserializationError(Exception):
"""The request body could not be deserialized."""
@@ -58,29 +72,18 @@ class InvalidProvider(Exception):
...
-def handle_provider_event(
- provider: str,
- request_data: dict[str, Any],
- organization_id: int,
-) -> list[FlagAuditLogRow]:
- match provider:
+def get_provider(
+ organization_id: int, provider_name: str, headers: HttpHeaders
+) -> ProviderProtocol[dict[str, Any]] | None:
+ match provider_name:
case "launchdarkly":
- return handle_launchdarkly_event(request_data, organization_id)
+ return LaunchDarklyProvider(organization_id, signature=headers.get("X-LD-Signature"))
+ case "generic":
+ return GenericProvider(organization_id, signature=headers.get("X-Sentry-Signature"))
+ case "unleash":
+ return UnleashProvider(organization_id, signature=headers.get("Authorization"))
case _:
- raise InvalidProvider(provider)
-
-
-def validate_provider_event(
- provider: str,
- request_data: bytes,
- request_headers: HttpHeaders,
- organization_id: int,
-) -> bool:
- match provider:
- case "launchdarkly":
- return validate_launchdarkly_event(request_data, request_headers, organization_id)
- case _:
- raise InvalidProvider(provider)
+ return None
"""LaunchDarkly provider."""
@@ -115,7 +118,49 @@ class LaunchDarklyItemSerializer(serializers.Serializer):
}
-def handle_launchdarkly_actions(action: str) -> int:
+class LaunchDarklyProvider:
+ provider_name = "launchdarkly"
+
+ def __init__(self, organization_id: int, signature: str | None) -> None:
+ self.organization_id = organization_id
+ self.signature = signature
+
+ def handle(self, message: dict[str, Any]) -> list[FlagAuditLogRow]:
+ serializer = LaunchDarklyItemSerializer(data=message)
+ if not serializer.is_valid():
+ raise DeserializationError(serializer.errors)
+
+ result = serializer.validated_data
+
+ access = result["accesses"][0]
+ if access["action"] not in SUPPORTED_LAUNCHDARKLY_ACTIONS:
+ return []
+
+ return [
+ {
+ "action": _handle_launchdarkly_actions(access["action"]),
+ "created_at": datetime.datetime.fromtimestamp(
+ result["date"] / 1000.0, datetime.UTC
+ ),
+ "created_by": result["member"]["email"],
+ "created_by_type": CREATED_BY_TYPE_MAP["email"],
+ "flag": result["name"],
+ "organization_id": self.organization_id,
+ "tags": {"description": result["description"]},
+ }
+ ]
+
+ def validate(self, message_bytes: bytes) -> bool:
+ validator = PayloadSignatureValidator(
+ self.organization_id,
+ self.provider_name,
+ message_bytes,
+ self.signature,
+ )
+ return validator.validate()
+
+
+def _handle_launchdarkly_actions(action: str) -> int:
if action == "createFlag" or action == "cloneFlag":
return ACTION_MAP["created"]
if action == "deleteFlag":
@@ -124,60 +169,188 @@ def handle_launchdarkly_actions(action: str) -> int:
return ACTION_MAP["updated"]
-def handle_launchdarkly_event(
- request_data: dict[str, Any], organization_id: int
-) -> list[FlagAuditLogRow]:
- serializer = LaunchDarklyItemSerializer(data=request_data)
- if not serializer.is_valid():
- raise DeserializationError(serializer.errors)
-
- result = serializer.validated_data
-
- access = result["accesses"][0]
- if access["action"] not in SUPPORTED_LAUNCHDARKLY_ACTIONS:
- return []
-
- return [
- {
- "action": handle_launchdarkly_actions(access["action"]),
- "created_at": datetime.datetime.fromtimestamp(result["date"] / 1000.0, datetime.UTC),
- "created_by": result["member"]["email"],
- "created_by_type": CREATED_BY_TYPE_MAP["email"],
- "flag": result["name"],
- "organization_id": organization_id,
- "tags": {"description": result["description"]},
- }
- ]
-
-
-def validate_launchdarkly_event(
- request_data: bytes,
- request_headers: HttpHeaders,
- organization_id: int,
-) -> bool:
- """Return "true" if the launchdarkly payload is valid."""
- signature = request_headers.get("X-LD-Signature")
- if signature is None:
- return False
+"""Generic provider.
- models = FlagWebHookSigningSecretModel.objects.filter(
- organization_id=organization_id,
- provider="launchdarkly",
- ).all()
- for model in models:
- if hmac_sha256_hex_digest(model.secret, request_data) == signature:
- return True
- return False
+The generic provider represents a Sentry-defined generic web hook
+interface that anyone can integrate with.
+"""
-def hmac_sha256_hex_digest(key: str, message: bytes):
- return hmac.new(key.encode(), message, hashlib.sha256).hexdigest()
+class GenericItemCreatedBySerializer(serializers.Serializer):
+ id = serializers.CharField(required=True, max_length=100)
+ type = serializers.ChoiceField(choices=(("email", 0), ("id", 1), ("name", 2)), required=True)
-"""Internal flag-pole provider.
+class GenericItemSerializer(serializers.Serializer):
+ action = serializers.ChoiceField(
+ choices=(("created", 0), ("updated", 1), ("deleted", 2)), required=True
+ )
+ change_id = serializers.IntegerField(required=True)
+ created_at = serializers.DateTimeField(required=True)
+ created_by = GenericItemCreatedBySerializer(required=True)
+ flag = serializers.CharField(required=True, max_length=100)
+
+
+class GenericMetaSerializer(serializers.Serializer):
+ version = serializers.IntegerField(required=True)
+
+
+class GenericRequestSerializer(serializers.Serializer):
+ data = GenericItemSerializer(many=True, required=True) # type: ignore[assignment]
+ meta = GenericMetaSerializer(required=True)
+
+
+class GenericProvider:
+ provider_name = "generic"
+
+ def __init__(self, organization_id: int, signature: str | None) -> None:
+ self.organization_id = organization_id
+ self.signature = signature
+
+ def handle(self, message: dict[str, Any]) -> list[FlagAuditLogRow]:
+ serializer = GenericRequestSerializer(data=message)
+ if not serializer.is_valid():
+ raise DeserializationError(serializer.errors)
+
+ seen = set()
+ result: list[FlagAuditLogRow] = []
+ for item in serializer.validated_data["data"]:
+ if item["change_id"] not in seen:
+ seen.add(item["change_id"])
+ result.append(
+ {
+ "action": ACTION_MAP[item["action"]],
+ "created_at": item["created_at"],
+ "created_by": item["created_by"]["id"],
+ "created_by_type": CREATED_BY_TYPE_MAP[item["created_by"]["type"]],
+ "flag": item["flag"],
+ "organization_id": self.organization_id,
+ "tags": {},
+ }
+ )
+
+ return result
+
+ def validate(self, message_bytes: bytes) -> bool:
+ validator = PayloadSignatureValidator(
+ self.organization_id,
+ self.provider_name,
+ message_bytes,
+ self.signature,
+ )
+ return validator.validate()
+
+
+"""Unleash provider."""
+
+SUPPORTED_UNLEASH_ACTIONS = {
+ "feature-created",
+ "feature-archived",
+ "feature-revived",
+ "feature-updated",
+ "feature-strategy-update",
+ "feature-strategy-add",
+ "feature-strategy-remove",
+ "feature-stale-on",
+ "feature-stale-off",
+ "feature-completed",
+ "feature-environment-enabled",
+ "feature-environment-disabled",
+}
-Allows us to skip the HTTP endpoint.
-"""
+
+class UnleashItemSerializer(serializers.Serializer):
+ # Technically featureName is not required by Unleash, but for all the actions we care about, it should exist.
+ featureName = serializers.CharField(max_length=100, required=True)
+ createdAt = serializers.DateTimeField(
+ required=True,
+ input_formats=["iso-8601"],
+ format=None,
+ default_timezone=datetime.UTC,
+ )
+ createdBy = serializers.CharField(required=True)
+ createdByUserId = serializers.IntegerField(required=False, allow_null=True)
+ type = serializers.CharField(allow_blank=True, required=True)
+ tags = serializers.ListField(
+ child=serializers.DictField(child=serializers.CharField()), required=False, allow_null=True
+ )
+ project = serializers.CharField(required=False, allow_null=True)
+ environment = serializers.CharField(required=False, allow_null=True)
+
+
+def _get_user(validated_event: dict[str, Any]) -> tuple[str, int]:
+ """If the email is not valid, default to the user ID sent by Unleash."""
+ created_by = validated_event["createdBy"]
+ try:
+ serializers.EmailField().run_validation(created_by)
+ return created_by, CREATED_BY_TYPE_MAP["email"]
+ except ValidationError:
+ pass
+
+ if "createdByUserId" in validated_event:
+ return validated_event["createdByUserId"], CREATED_BY_TYPE_MAP["id"]
+ return created_by, CREATED_BY_TYPE_MAP["name"]
+
+
+class UnleashProvider:
+ provider_name = "unleash"
+
+ def __init__(self, organization_id: int, signature: str | None) -> None:
+ self.organization_id = organization_id
+ self.signature = signature
+
+ def handle(self, message: dict[str, Any]) -> list[FlagAuditLogRow]:
+ serializer = UnleashItemSerializer(data=message)
+ if not serializer.is_valid():
+ raise DeserializationError(serializer.errors)
+
+ result = serializer.validated_data
+ action = result["type"]
+
+ if action not in SUPPORTED_UNLEASH_ACTIONS:
+ return []
+
+ created_by, created_by_type = _get_user(result)
+ unleash_tags = result.get("tags") or []
+ tags = {tag["type"]: tag["value"] for tag in unleash_tags}
+
+ if result.get("project"):
+ tags["project"] = result.get("project")
+
+ if result.get("environment"):
+ tags["environment"] = result.get("environment")
+
+ return [
+ {
+ "action": _handle_unleash_actions(action),
+ "created_at": result["createdAt"],
+ "created_by": created_by,
+ "created_by_type": created_by_type,
+ "flag": result["featureName"],
+ "organization_id": self.organization_id,
+ "tags": tags,
+ }
+ ]
+
+ def validate(self, message_bytes: bytes) -> bool:
+ validator = AuthTokenValidator(
+ self.organization_id,
+ self.provider_name,
+ self.signature,
+ )
+ return validator.validate()
+
+
+def _handle_unleash_actions(action: str) -> int:
+ if action == "feature-created":
+ return ACTION_MAP["created"]
+ if action == "feature-archived":
+ return ACTION_MAP["deleted"]
+ else:
+ return ACTION_MAP["updated"]
+
+
+"""Flagpole provider."""
class FlagAuditLogItem(TypedDict):
@@ -205,3 +378,81 @@ def handle_flag_pole_event_internal(items: list[FlagAuditLogItem], organization_
for item in items
]
)
+
+
+"""Helpers."""
+
+
+class AuthTokenValidator:
+    """Validates a static authorization token for a flag webhook provider.
+
+    Similar to the PayloadSignatureValidator class below, except the token
+    is compared directly rather than being used to sign the request body.
+    `secret_finder` is injectable so the test suite can supply secrets
+    without touching the database.
+    """
+
+    def __init__(
+        self,
+        organization_id: int,
+        provider: str,
+        signature: str | None,
+        secret_finder: Callable[[int, str], Iterator[str]] | None = None,
+    ) -> None:
+        self.organization_id = organization_id
+        self.provider = provider
+        self.signature = signature
+        self.secret_finder = secret_finder or _query_signing_secrets
+
+    def validate(self) -> bool:
+        # No token provided: reject without querying for secrets.
+        if self.signature is None:
+            return False
+
+        for secret in self.secret_finder(self.organization_id, self.provider):
+            # Compare in constant time so we don't leak the secret through
+            # timing differences.
+            if hmac.compare_digest(secret, self.signature):
+                return True
+
+        return False
+
+
+class PayloadSignatureValidator:
+    """Validates an HMAC signature of the request body for a webhook provider.
+
+    Allows us to inject dependencies for differing use cases. Specifically
+    the test suite.
+    """
+
+    def __init__(
+        self,
+        organization_id: int,
+        provider: str,
+        request_body: bytes,
+        signature: str | None,
+        secret_finder: Callable[[int, str], Iterator[str]] | None = None,
+        secret_validator: Callable[[str, bytes], str] | None = None,
+    ) -> None:
+        self.organization_id = organization_id
+        self.provider = provider
+        self.request_body = request_body
+        self.signature = signature
+        self.secret_finder = secret_finder or _query_signing_secrets
+        self.secret_validator = secret_validator or hmac_sha256_hex_digest
+
+    def validate(self) -> bool:
+        # No signature header: reject without computing any digests.
+        if self.signature is None:
+            return False
+
+        for secret in self.secret_finder(self.organization_id, self.provider):
+            # Compare digests in constant time to avoid timing side channels.
+            if hmac.compare_digest(
+                self.secret_validator(secret, self.request_body), self.signature
+            ):
+                return True
+        return False
+
+
+def _query_signing_secrets(organization_id: int, provider: str) -> Iterator[str]:
+    """Yield every signing secret registered for the organization/provider pair."""
+    for model in FlagWebHookSigningSecretModel.objects.filter(
+        organization_id=organization_id,
+        provider=provider,
+    ).all():
+        yield model.secret
+
+
+def hmac_sha256_hex_digest(key: str, message: bytes) -> str:
+    """Return the hex-encoded HMAC-SHA256 digest of `message` signed with `key`."""
+    return hmac.new(key.encode(), message, hashlib.sha256).hexdigest()
diff --git a/src/sentry/grouping/api.py b/src/sentry/grouping/api.py
index afebb8edb6fc62..9bd0488908593c 100644
--- a/src/sentry/grouping/api.py
+++ b/src/sentry/grouping/api.py
@@ -8,9 +8,11 @@
import sentry_sdk
from sentry import options
+from sentry.db.models.fields.node import NodeData
from sentry.grouping.component import (
AppGroupingComponent,
BaseGroupingComponent,
+ ContributingComponent,
DefaultGroupingComponent,
SystemGroupingComponent,
)
@@ -20,6 +22,7 @@
from sentry.grouping.strategies.configurations import CONFIGURATIONS
from sentry.grouping.utils import (
expand_title_template,
+ get_fingerprint_type,
hash_from_values,
is_default_fingerprint_var,
resolve_fingerprint_values,
@@ -83,7 +86,7 @@ def get_config_dict(self, project: Project) -> GroupingConfig:
"enhancements": self._get_enhancements(project),
}
- def _get_enhancements(self, project) -> str:
+ def _get_enhancements(self, project: Project) -> str:
project_enhancements = project.get_option("sentry:grouping_enhancements")
config_id = self._get_config_id(project)
@@ -112,14 +115,14 @@ def _get_enhancements(self, project) -> str:
cache.set(cache_key, enhancements)
return enhancements
- def _get_config_id(self, project):
+ def _get_config_id(self, project: Project) -> str:
raise NotImplementedError
class ProjectGroupingConfigLoader(GroupingConfigLoader):
option_name: str # Set in subclasses
- def _get_config_id(self, project):
+ def _get_config_id(self, project: Project) -> str:
return project.get_option(
self.option_name,
validate=lambda x: x in CONFIGURATIONS,
@@ -145,29 +148,29 @@ class BackgroundGroupingConfigLoader(GroupingConfigLoader):
cache_prefix = "background-grouping-enhancements:"
- def _get_config_id(self, project):
+ def _get_config_id(self, _project: Project) -> str:
return options.get("store.background-grouping-config-id")
@sentry_sdk.tracing.trace
-def get_grouping_config_dict_for_project(project) -> GroupingConfig:
+def get_grouping_config_dict_for_project(project: Project) -> GroupingConfig:
"""Fetches all the information necessary for grouping from the project
settings. The return value of this is persisted with the event on
ingestion so that the grouping algorithm can be re-run later.
This is called early on in normalization so that everything that is needed
- to group the project is pulled into the event.
+ to group the event is pulled into the event data.
"""
loader = PrimaryGroupingConfigLoader()
return loader.get_config_dict(project)
-def get_grouping_config_dict_for_event_data(data, project) -> GroupingConfig:
+def get_grouping_config_dict_for_event_data(data: NodeData, project: Project) -> GroupingConfig:
"""Returns the grouping config for an event dictionary."""
return data.get("grouping_config") or get_grouping_config_dict_for_project(project)
-def get_default_enhancements(config_id=None) -> str:
+def get_default_enhancements(config_id: str | None = None) -> str:
base: str | None = DEFAULT_GROUPING_ENHANCEMENTS_BASE
if config_id is not None:
base = CONFIGURATIONS[config_id].enhancements_base
@@ -191,7 +194,7 @@ def get_projects_default_fingerprinting_bases(
return bases
-def get_default_grouping_config_dict(config_id=None) -> GroupingConfig:
+def get_default_grouping_config_dict(config_id: str | None = None) -> GroupingConfig:
"""Returns the default grouping config."""
if config_id is None:
from sentry.projectoptions.defaults import DEFAULT_GROUPING_CONFIG
@@ -200,17 +203,16 @@ def get_default_grouping_config_dict(config_id=None) -> GroupingConfig:
return {"id": config_id, "enhancements": get_default_enhancements(config_id)}
-def load_grouping_config(config_dict=None) -> StrategyConfiguration:
+def load_grouping_config(config_dict: GroupingConfig | None = None) -> StrategyConfiguration:
"""Loads the given grouping config."""
if config_dict is None:
config_dict = get_default_grouping_config_dict()
elif "id" not in config_dict:
raise ValueError("Malformed configuration dictionary")
- config_dict = dict(config_dict)
- config_id = config_dict.pop("id")
+ config_id = config_dict["id"]
if config_id not in CONFIGURATIONS:
raise GroupingConfigNotFound(config_id)
- return CONFIGURATIONS[config_id](**config_dict)
+ return CONFIGURATIONS[config_id](enhancements=config_dict["enhancements"])
def load_default_grouping_config() -> StrategyConfiguration:
@@ -249,9 +251,7 @@ def get_fingerprinting_config_for_project(
def apply_server_fingerprinting(
- event: MutableMapping[str, Any],
- fingerprinting_config: FingerprintingRules,
- allow_custom_title: bool = True,
+ event: MutableMapping[str, Any], fingerprinting_config: FingerprintingRules
) -> None:
fingerprint_info = {}
@@ -268,7 +268,7 @@ def apply_server_fingerprinting(
# A custom title attribute is stored in the event to override the
# default title.
- if "title" in attributes and allow_custom_title:
+ if "title" in attributes:
event["title"] = expand_title_template(attributes["title"], event)
event["fingerprint"] = new_fingerprint
@@ -280,23 +280,35 @@ def apply_server_fingerprinting(
event["_fingerprint_info"] = fingerprint_info
-def _get_component_trees_for_variants(
+def _get_variants_from_strategies(
event: Event, context: GroupingContext
-) -> dict[str, AppGroupingComponent | SystemGroupingComponent | DefaultGroupingComponent]:
+) -> dict[str, ComponentVariant]:
winning_strategy: str | None = None
precedence_hint: str | None = None
- all_strategies_components_by_variant: dict[str, list[BaseGroupingComponent]] = {}
+ all_strategies_components_by_variant: dict[str, list[BaseGroupingComponent[Any]]] = {}
+ winning_strategy_components_by_variant = {}
+ # `iter_strategies` presents strategies in priority order, which allows us to go with the first
+ # one which produces a result. (See `src/sentry/grouping/strategies/configurations.py` for the
+ # strategies used by each config.)
for strategy in context.config.iter_strategies():
- # Defined in src/sentry/grouping/strategies/base.py
current_strategy_components_by_variant = strategy.get_grouping_components(
event, context=context
)
for variant_name, component in current_strategy_components_by_variant.items():
all_strategies_components_by_variant.setdefault(variant_name, []).append(component)
- if winning_strategy is None:
- if component.contributes:
+ if component.contributes:
+ if winning_strategy is None:
+ # If we haven't yet found a winner.. now we have!
+ #
+ # The value of `current_strategy_components_by_variant` will change with each
+ # strategy, so grab a separate reference to the winning ones so we don't lose
+ # track of them
+ #
+ # Also, create a hint we can add to components from other strategies indicating
+ # that this one took precedence
+ winning_strategy_components_by_variant = current_strategy_components_by_variant
winning_strategy = strategy.name
variant_descriptor = "/".join(
sorted(
@@ -313,10 +325,13 @@ def _get_component_trees_for_variants(
),
"" if strategy.name.endswith("s") else "s",
)
- elif component.contributes and winning_strategy != strategy.name:
- component.update(contributes=False, hint=precedence_hint)
+ # On the other hand, if another strategy before this one was already the winner, we
+ # don't want any of this strategy's components to contribute to grouping
+ elif strategy.name != winning_strategy:
+ component.update(contributes=False, hint=precedence_hint)
+
+ variants = {}
- component_trees_by_variant = {}
for variant_name, components in all_strategies_components_by_variant.items():
component_class_by_variant = {
"app": AppGroupingComponent,
@@ -324,11 +339,28 @@ def _get_component_trees_for_variants(
"system": SystemGroupingComponent,
}
root_component = component_class_by_variant[variant_name](values=components)
+
+ # The root component will pull its `contributes` value from the components it wraps - if
+ # none of them contributes, it will also be marked as non-contributing. But those components
+ # might not have the same reasons for not contributing (`hint` values), so it can't pull
+        # that from them - it's gotta be set here.
if not root_component.contributes and precedence_hint:
root_component.update(hint=precedence_hint)
- component_trees_by_variant[variant_name] = root_component
- return component_trees_by_variant
+ winning_strategy_component = winning_strategy_components_by_variant.get(variant_name)
+ contributing_component = (
+ winning_strategy_component
+ if winning_strategy_component and winning_strategy_component.contributes
+ else None
+ )
+
+ variants[variant_name] = ComponentVariant(
+ component=root_component,
+ contributing_component=contributing_component,
+ strategy_config=context.config,
+ )
+
+ return variants
# This is called by the Event model in get_grouping_variants()
@@ -336,84 +368,97 @@ def get_grouping_variants_for_event(
event: Event, config: StrategyConfiguration | None = None
) -> dict[str, BaseVariant]:
"""Returns a dict of all grouping variants for this event."""
- # If a checksum is set the only variant that comes back from this
- # event is the checksum variant.
+ # If a checksum is set the only variant that comes back from this event is the checksum variant.
#
# TODO: Is there a reason we don't treat a checksum like a custom fingerprint, and run the other
# strategies but mark them as non-contributing, with explanations why?
- #
- # TODO: In the case where we have to hash the checksum to get a value in the right format, we
- # store the raw value as well (provided it's not so long that it will overflow the DB field).
- # Even when we do this, though, we don't set the raw value as non-cotributing, and we don't add
- # an "ignored because xyz" hint on the variant, which we should.
checksum = event.data.get("checksum")
if checksum:
if HASH_RE.match(checksum):
return {"checksum": ChecksumVariant(checksum)}
+ else:
+ return {
+ "hashed_checksum": HashedChecksumVariant(hash_from_values(checksum), checksum),
+ }
- variants: dict[str, BaseVariant] = {
- "hashed_checksum": HashedChecksumVariant(hash_from_values(checksum), checksum),
- }
+ # Otherwise we go to the various forms of grouping based on fingerprints and/or event data
+ # (stacktrace, message, etc.)
+ raw_fingerprint = event.data.get("fingerprint") or ["{{ default }}"]
+ fingerprint_info = event.data.get("_fingerprint_info", {})
+ fingerprint_type = get_fingerprint_type(raw_fingerprint)
+ resolved_fingerprint = (
+ raw_fingerprint
+ if fingerprint_type == "default"
+ else resolve_fingerprint_values(raw_fingerprint, event.data)
+ )
- # The legacy code path also supported arbitrary values here but
- # it will blow up if it results in more than 32 bytes of data
- # as this cannot be inserted into the database. (See GroupHash.hash)
- if len(checksum) <= 32:
- variants["checksum"] = ChecksumVariant(checksum)
+ # Run all of the event-data-based grouping strategies. Any which apply will create grouping
+ # components, which will then be grouped into variants by variant type (system, app, default).
+ context = GroupingContext(config or load_default_grouping_config())
+ strategy_component_variants: dict[str, ComponentVariant] = _get_variants_from_strategies(
+ event, context
+ )
- return variants
+ # Create a separate container for these for now to preserve the typing of
+ # `strategy_component_variants`
+ additional_variants: dict[str, BaseVariant] = {}
- # Otherwise we go to the various forms of fingerprint handling. If the event carries
- # a materialized fingerprint info from server side fingerprinting we forward it to the
- # variants which can export additional information about them.
- fingerprint = event.data.get("fingerprint") or ["{{ default }}"]
- fingerprint_info = event.data.get("_fingerprint_info", {})
- defaults_referenced = sum(1 if is_default_fingerprint_var(d) else 0 for d in fingerprint)
-
- if config is None:
- config = load_default_grouping_config()
- context = GroupingContext(config)
-
- # At this point we need to calculate the default event values. If the
- # fingerprint is salted we will wrap it.
- component_trees_by_variant = _get_component_trees_for_variants(event, context)
-
- # If no defaults are referenced we produce a single completely custom
- # fingerprint and mark all other variants as non-contributing
- if defaults_referenced == 0:
- variants = {}
- for variant_name, root_component in component_trees_by_variant.items():
- root_component.update(
- contributes=False,
- hint="custom fingerprint takes precedence",
- )
- variants[variant_name] = ComponentVariant(root_component, context.config)
+ # If the fingerprint is the default fingerprint, we can use the variants as is. If it's custom,
+    # we need to create an additional fingerprint variant and mark the existing variants as
+ # non-contributing. And if it's hybrid, we'll replace the existing variants with "salted"
+ # versions which include the fingerprint.
+ if fingerprint_type == "custom":
+ for variant in strategy_component_variants.values():
+ variant.component.update(contributes=False, hint="custom fingerprint takes precedence")
- fingerprint = resolve_fingerprint_values(fingerprint, event.data)
if fingerprint_info.get("matched_rule", {}).get("is_builtin") is True:
- variants["built_in_fingerprint"] = BuiltInFingerprintVariant(
- fingerprint, fingerprint_info
+ additional_variants["built_in_fingerprint"] = BuiltInFingerprintVariant(
+ resolved_fingerprint, fingerprint_info
)
else:
- variants["custom_fingerprint"] = CustomFingerprintVariant(fingerprint, fingerprint_info)
-
- # If only the default is referenced, we can use the variants as is
- elif defaults_referenced == 1 and len(fingerprint) == 1:
- variants = {}
- for variant_name, root_component in component_trees_by_variant.items():
- variants[variant_name] = ComponentVariant(root_component, context.config)
-
- # Otherwise we need to "salt" our variants with the custom fingerprint value(s)
- else:
- variants = {}
- fingerprint = resolve_fingerprint_values(fingerprint, event.data)
- for variant_name, root_component in component_trees_by_variant.items():
- variants[variant_name] = SaltedComponentVariant(
- fingerprint, root_component, context.config, fingerprint_info
+ additional_variants["custom_fingerprint"] = CustomFingerprintVariant(
+ resolved_fingerprint, fingerprint_info
)
+ elif fingerprint_type == "hybrid":
+ for variant_name, variant in strategy_component_variants.items():
+ # Since we're reusing the variant names, when all of the variants are combined, these
+ # salted versions will replace the unsalted versions
+ additional_variants[variant_name] = SaltedComponentVariant.from_component_variant(
+ variant, resolved_fingerprint, fingerprint_info
+ )
+
+ final_variants = {
+ **strategy_component_variants,
+ # Add these in second, so the salted versions of any variants replace the unsalted versions
+ **additional_variants,
+ }
# Ensure we have a fallback hash if nothing else works out
- if not any(x.contributes for x in variants.values()):
- variants["fallback"] = FallbackVariant()
+ if not any(x.contributes for x in final_variants.values()):
+ final_variants["fallback"] = FallbackVariant()
- return variants
+ return final_variants
+
+
+def get_contributing_variant_and_component(
+    variants: dict[str, BaseVariant]
+) -> tuple[BaseVariant, ContributingComponent | None]:
+    """
+    Given all of an event's grouping variants, pick the one which contributes to
+    grouping, along with its contributing component (if it has one).
+    """
+    if len(variants) == 1:
+        contributing_variant = list(variants.values())[0]
+    else:
+        contributing_variant = (
+            variants["app"]
+            # TODO: We won't need this 'if' once we stop returning both app and system contributing
+            # variants
+            if "app" in variants and variants["app"].contributes
+            # Other than in the broken app/system case, there should only ever be a single
+            # contributing variant
+            else [variant for variant in variants.values() if variant.contributes][0]
+        )
+    # Checksum/fingerprint variants carry no component tree, hence the hasattr check
+    contributing_component = (
+        contributing_variant.contributing_component
+        if hasattr(contributing_variant, "contributing_component")
+        else None
+    )
+
+    return (contributing_variant, contributing_component)
diff --git a/src/sentry/grouping/component.py b/src/sentry/grouping/component.py
index 6a5fe7b28e8092..cfca678fe3a0e3 100644
--- a/src/sentry/grouping/component.py
+++ b/src/sentry/grouping/component.py
@@ -31,8 +31,13 @@ def _calculate_contributes[ValuesType](values: Sequence[ValuesType]) -> bool:
class BaseGroupingComponent[ValuesType: str | int | BaseGroupingComponent[Any]](ABC):
- """A grouping component is a recursive structure that is flattened
- into components to make a hash for grouping purposes.
+ """
+ A grouping component is a node in a tree describing the event data (exceptions, stacktraces,
+ messages, etc.) which can contribute to grouping. Each node's children, stored in the `values`
+ attribute, are either other grouping components or primitives representing the actual data.
+
+ For example, an exception component might have type, value, and stacktrace components as
+ children, and the type component might have the string "KeyError" as its child.
"""
hint: str | None = None
@@ -44,10 +49,9 @@ def __init__(
hint: str | None = None,
contributes: bool | None = None,
values: Sequence[ValuesType] | None = None,
- variant_provider: bool = False,
):
- self.variant_provider = variant_provider
-
+        # Use `update` to set attribute values because it ensures `contributes` is set (if
+ # `contributes` is not provided, `update` will derive it from the `values` value)
self.update(
hint=hint,
contributes=contributes,
@@ -146,8 +150,9 @@ def shallow_copy(self) -> Self:
return copy
def iter_values(self) -> Generator[str | int]:
- """Recursively walks the component and flattens it into a list of
- values.
+ """
+ Recursively walks the component tree, gathering literal values from contributing
+ branches into a flat list.
"""
if self.contributes:
for value in self.values:
@@ -336,6 +341,17 @@ def __init__(
class ThreadsGroupingComponent(BaseGroupingComponent[StacktraceGroupingComponent]):
id: str = "threads"
+ frame_counts: Counter[str]
+
+ def __init__(
+ self,
+ values: Sequence[StacktraceGroupingComponent] | None = None,
+ hint: str | None = None,
+ contributes: bool | None = None,
+ frame_counts: Counter[str] | None = None,
+ ):
+ super().__init__(hint=hint, contributes=contributes, values=values)
+ self.frame_counts = frame_counts or Counter()
class CSPGroupingComponent(
@@ -404,3 +420,17 @@ class SystemGroupingComponent(
]
):
id: str = "system"
+
+
+ContributingComponent = (
+ ChainedExceptionGroupingComponent
+ | ExceptionGroupingComponent
+ | StacktraceGroupingComponent
+ | ThreadsGroupingComponent
+ | CSPGroupingComponent
+ | ExpectCTGroupingComponent
+ | ExpectStapleGroupingComponent
+ | HPKPGroupingComponent
+ | MessageGroupingComponent
+ | TemplateGroupingComponent
+)
diff --git a/src/sentry/grouping/enhancer/__init__.py b/src/sentry/grouping/enhancer/__init__.py
index 89853ac57c0253..1966ab111f6f66 100644
--- a/src/sentry/grouping/enhancer/__init__.py
+++ b/src/sentry/grouping/enhancer/__init__.py
@@ -173,7 +173,7 @@ def assemble_stacktrace_component(
frames: list[dict[str, Any]],
platform: str | None,
exception_data: dict[str, Any] | None = None,
- ) -> tuple[StacktraceGroupingComponent, bool]:
+ ) -> StacktraceGroupingComponent:
"""
This assembles a `stacktrace` grouping component out of the given
`frame` components and source frames.
@@ -205,7 +205,7 @@ def assemble_stacktrace_component(
frame_counts=frame_counts,
)
- return stacktrace_component, rust_results.invert_stacktrace
+ return stacktrace_component
def as_dict(self, with_rules=False):
rv = {
@@ -265,7 +265,7 @@ def loads(cls, data) -> Enhancements:
@classmethod
@sentry_sdk.tracing.trace
- def from_config_string(self, s, bases=None, id=None) -> Enhancements:
+ def from_config_string(cls, s, bases=None, id=None) -> Enhancements:
rust_enhancements = parse_rust_enhancements("config_string", s)
rules = parse_enhancements(s)
diff --git a/src/sentry/grouping/enhancer/actions.py b/src/sentry/grouping/enhancer/actions.py
index 10911971a0b23f..988a514b2fd576 100644
--- a/src/sentry/grouping/enhancer/actions.py
+++ b/src/sentry/grouping/enhancer/actions.py
@@ -56,8 +56,8 @@ def is_updater(self) -> bool:
def _from_config_structure(cls, val, version: int):
if isinstance(val, list):
return VarAction(val[0], val[1])
- flag, range = REVERSE_ACTION_FLAGS[val >> ACTION_BITSIZE]
- return FlagAction(ACTIONS[val & 0xF], flag, range)
+ flag, range_direction = REVERSE_ACTION_FLAGS[val >> ACTION_BITSIZE]
+ return FlagAction(ACTIONS[val & 0xF], flag, range_direction)
class FlagAction(EnhancementAction):
diff --git a/src/sentry/grouping/enhancer/matchers.py b/src/sentry/grouping/enhancer/matchers.py
index a86b9afa2f09a0..8a411da381fe96 100644
--- a/src/sentry/grouping/enhancer/matchers.py
+++ b/src/sentry/grouping/enhancer/matchers.py
@@ -202,7 +202,8 @@ def _to_config_structure(self, version):
if self.key == "family":
arg = "".join(_f for _f in [FAMILIES.get(x) for x in self.pattern.split(",")] if _f)
elif self.key == "app":
- arg = {True: "1", False: "0"}.get(bool_from_string(self.pattern), "")
+ boolified_pattern = bool_from_string(self.pattern)
+ arg = "1" if boolified_pattern is True else "0" if boolified_pattern is False else ""
else:
arg = self.pattern
return ("!" if self.negated else "") + MATCH_KEYS[self.key] + arg
diff --git a/src/sentry/grouping/fingerprinting/__init__.py b/src/sentry/grouping/fingerprinting/__init__.py
index 5bf069ee7da605..003083263266b8 100644
--- a/src/sentry/grouping/fingerprinting/__init__.py
+++ b/src/sentry/grouping/fingerprinting/__init__.py
@@ -240,6 +240,9 @@ def _get_release(self) -> list[_ReleaseInfo]:
return self._release
def get_values(self, match_type: str) -> list[dict[str, Any]]:
+ """
+ Pull values from all the spots in the event appropriate to the given match type.
+ """
return getattr(self, "_get_" + match_type)()
@@ -375,7 +378,12 @@ def _from_config_structure(
class FingerprintMatcher:
- def __init__(self, key: str, pattern: str, negated: bool = False) -> None:
+ def __init__(
+ self,
+ key: str, # The event attribute on which to match
+ pattern: str, # The value to match (or to not match, depending on `negated`)
+ negated: bool = False, # If True, match when `event[key]` does NOT equal `pattern`
+ ) -> None:
if key.startswith("tags."):
self.key = key
else:
@@ -422,7 +430,7 @@ def _positive_path_match(self, value: str | None) -> bool:
return False
def _positive_match(self, values: dict[str, Any]) -> bool:
- # path is special in that it tests against two values (abs_path and path)
+ # `path` is special in that it tests against two values (`abs_path` and `filename`)
if self.key == "path":
value = values.get("abs_path")
if self._positive_path_match(value):
@@ -433,7 +441,7 @@ def _positive_match(self, values: dict[str, Any]) -> bool:
return True
return False
- # message tests against value as well as this is what users expect
+ # message tests against exception value also, as this is what users expect
if self.key == "message":
for key in ("message", "value"):
value = values.get(key)
@@ -444,20 +452,13 @@ def _positive_match(self, values: dict[str, Any]) -> bool:
value = values.get(self.key)
if value is None:
return False
- elif self.key == "package":
+ elif self.key in ["package", "release"]:
if self._positive_path_match(value):
return True
- elif self.key == "family":
- flags = self.pattern.split(",")
- if "all" in flags or value in flags:
- return True
- elif self.key == "sdk":
+ elif self.key in ["family", "sdk"]:
flags = self.pattern.split(",")
if "all" in flags or value in flags:
return True
- elif self.key == "release":
- if self._positive_path_match(value):
- return True
elif self.key == "app":
ref_val = bool_from_string(self.pattern)
if ref_val is not None and ref_val == value:
@@ -583,7 +584,7 @@ def visit_fingerprinting_rules(
in_header = True
for child in children:
if isinstance(child, str):
- if in_header and child[:2] == "##":
+ if in_header and child.startswith("##"):
changelog.append(child[2:].rstrip())
else:
in_header = False
diff --git a/src/sentry/grouping/grouping_info.py b/src/sentry/grouping/grouping_info.py
index 5e7ff6e9f695da..92e4d60e6bc8ac 100644
--- a/src/sentry/grouping/grouping_info.py
+++ b/src/sentry/grouping/grouping_info.py
@@ -1,5 +1,4 @@
import logging
-from collections.abc import Mapping
from typing import Any
from sentry.api.exceptions import ResourceDoesNotExist
@@ -89,7 +88,7 @@ def _check_for_mismatched_hashes(
def get_grouping_info_from_variants(
- variants: Mapping[str, BaseVariant],
+ variants: dict[str, BaseVariant],
) -> dict[str, dict[str, Any]]:
"""
Given a dictionary of variant objects, create and return a copy of the dictionary in which each
diff --git a/src/sentry/grouping/ingest/grouphash_metadata.py b/src/sentry/grouping/ingest/grouphash_metadata.py
index 0ecbbf0aec307f..94dc3251c99766 100644
--- a/src/sentry/grouping/ingest/grouphash_metadata.py
+++ b/src/sentry/grouping/ingest/grouphash_metadata.py
@@ -6,6 +6,7 @@
from typing_extensions import TypeIs
from sentry.eventstore.models import Event
+from sentry.grouping.api import get_contributing_variant_and_component
from sentry.grouping.component import (
ChainedExceptionGroupingComponent,
CSPGroupingComponent,
@@ -135,32 +136,13 @@ def get_hash_basis_and_metadata(
metrics_timer_tags: MutableTags,
) -> tuple[HashBasis, HashingMetadata]:
hashing_metadata: HashingMetadata = {}
-
- # TODO: This (and `contributing_variant` below) are typed as `Any` so that we don't have to cast
- # them to whatever specific subtypes of `BaseVariant` and `GroupingComponent` (respectively)
- # each of the helper calls below requires. Casting once, to a type retrieved from a look-up,
- # doesn't work, but maybe there's a better way?
- contributing_variant: Any = (
- variants["app"]
- # TODO: We won't need this 'if' once we stop returning both app and system contributing
- # variants
- if "app" in variants and variants["app"].contributes
- else (
- variants["hashed_checksum"]
- # TODO: We won't need this 'if' once we stop returning both hashed and non-hashed
- # checksum contributing variants
- if "hashed_checksum" in variants
- # Other than in the broken app/system and hashed/raw checksum cases, there should only
- # ever be a single contributing variant
- else [variant for variant in variants.values() if variant.contributes][0]
- )
- )
- contributing_component: Any = (
- # There should only ever be a single contributing component here at the top level
- [value for value in contributing_variant.component.values if value.contributes][0]
- if hasattr(contributing_variant, "component")
- else None
- )
+ # TODO: These are typed as `Any` so that we don't have to cast them to whatever specific
+ # subtypes of `BaseVariant` and `GroupingComponent` (respectively) each of the helper calls
+ # below requires. Casting once, to a type retrieved from a look-up, doesn't work, but maybe
+ # there's a better way?
+ contributors = get_contributing_variant_and_component(variants)
+ contributing_variant: Any = contributors[0]
+ contributing_component: Any = contributors[1]
# Hybrid fingerprinting adds 'modified' to the beginning of the description of whatever method
# was used before the extra fingerprint was added. We classify events with hybrid fingerprints
@@ -278,7 +260,7 @@ def _get_stacktrace_hashing_metadata(
),
) -> StacktraceHashingMetadata:
return {
- "stacktrace_type": "in_app" if "in-app" in contributing_variant.description else "system",
+ "stacktrace_type": "in_app" if contributing_variant.variant_name == "app" else "system",
"stacktrace_location": (
"exception"
if "exception" in contributing_variant.description
diff --git a/src/sentry/grouping/ingest/hashing.py b/src/sentry/grouping/ingest/hashing.py
index 93b0c6e4865f61..0aa5d76f90b2f1 100644
--- a/src/sentry/grouping/ingest/hashing.py
+++ b/src/sentry/grouping/ingest/hashing.py
@@ -67,9 +67,7 @@ def _calculate_event_grouping(
# look at `grouping_config` to pick the right parameters.
event.data["fingerprint"] = event.data.data.get("fingerprint") or ["{{ default }}"]
apply_server_fingerprinting(
- event.data.data,
- get_fingerprinting_config_for_project(project),
- allow_custom_title=True,
+ event.data.data, get_fingerprinting_config_for_project(project)
)
with metrics.timer("event_manager.event.get_hashes", tags=metric_tags):
diff --git a/src/sentry/grouping/ingest/seer.py b/src/sentry/grouping/ingest/seer.py
index 234759e73ddd8a..2335348ef26e22 100644
--- a/src/sentry/grouping/ingest/seer.py
+++ b/src/sentry/grouping/ingest/seer.py
@@ -1,5 +1,4 @@
import logging
-from collections.abc import Mapping
from dataclasses import asdict
from typing import Any
@@ -17,11 +16,14 @@
from sentry.seer.similarity.similar_issues import get_similarity_data_from_seer
from sentry.seer.similarity.types import SimilarIssuesEmbeddingsRequest
from sentry.seer.similarity.utils import (
+ SEER_INELIGIBLE_EVENT_PLATFORMS,
ReferrerOptions,
- event_content_is_seer_eligible,
+ event_content_has_stacktrace,
filter_null_from_string,
get_stacktrace_string_with_metrics,
+ has_too_many_contributing_frames,
killswitch_enabled,
+ record_did_call_seer_metric,
)
from sentry.utils import metrics
from sentry.utils.circuit_breaker2 import CircuitBreaker
@@ -30,7 +32,7 @@
logger = logging.getLogger("sentry.events.grouping")
-def should_call_seer_for_grouping(event: Event, variants: Mapping[str, BaseVariant]) -> bool:
+def should_call_seer_for_grouping(event: Event, variants: dict[str, BaseVariant]) -> bool:
"""
Use event content, feature flags, rate limits, killswitches, seer health, etc. to determine
whether a call to Seer should be made.
@@ -39,14 +41,15 @@ def should_call_seer_for_grouping(event: Event, variants: Mapping[str, BaseVaria
project = event.project
# Check both of these before returning based on either so we can gather metrics on their results
- content_is_eligible = event_content_is_seer_eligible(event)
+ content_is_eligible = _event_content_is_seer_eligible(event)
seer_enabled_for_project = _project_has_similarity_grouping_enabled(project)
if not (content_is_eligible and seer_enabled_for_project):
return False
if (
_has_customized_fingerprint(event, variants)
- or killswitch_enabled(project.id, event)
+ or _has_too_many_contributing_frames(event, variants)
+ or killswitch_enabled(project.id, ReferrerOptions.INGEST, event)
or _circuit_breaker_broken(event, project)
# The rate limit check has to be last (see below) but rate-limiting aside, call this after other checks
# because it calculates the stacktrace string, which we only want to spend the time to do if we already
@@ -66,6 +69,42 @@ def should_call_seer_for_grouping(event: Event, variants: Mapping[str, BaseVaria
return True
+def _event_content_is_seer_eligible(event: Event) -> bool:
+ """
+ Determine if an event's contents makes it fit for using with Seer's similar issues model.
+ """
+ if not event_content_has_stacktrace(event):
+ metrics.incr(
+ "grouping.similarity.event_content_seer_eligible",
+ sample_rate=options.get("seer.similarity.metrics_sample_rate"),
+ tags={"eligible": False, "blocker": "no-stacktrace"},
+ )
+ return False
+
+ if event.platform in SEER_INELIGIBLE_EVENT_PLATFORMS:
+ metrics.incr(
+ "grouping.similarity.event_content_seer_eligible",
+ sample_rate=options.get("seer.similarity.metrics_sample_rate"),
+ tags={"eligible": False, "blocker": "unsupported-platform"},
+ )
+ return False
+
+ metrics.incr(
+ "grouping.similarity.event_content_seer_eligible",
+ sample_rate=options.get("seer.similarity.metrics_sample_rate"),
+ tags={"eligible": True, "blocker": "none"},
+ )
+ return True
+
+
+def _has_too_many_contributing_frames(event: Event, variants: dict[str, BaseVariant]) -> bool:
+ if has_too_many_contributing_frames(event, variants, ReferrerOptions.INGEST):
+ record_did_call_seer_metric(call_made=False, blocker="excess-frames")
+ return True
+
+ return False
+
+
def _project_has_similarity_grouping_enabled(project: Project) -> bool:
# TODO: This is a hack to get ingest to turn on for projects as soon as they're backfilled. When
# the backfill script completes, we turn on this option, enabling ingest immediately rather than
@@ -86,7 +125,7 @@ def _project_has_similarity_grouping_enabled(project: Project) -> bool:
# combined with some other value). To the extent to which we're then using this function to decide
# whether or not to call Seer, this means that the calculations giving rise to the default part of
# the value never involve Seer input. In the long run, we probably want to change that.
-def _has_customized_fingerprint(event: Event, variants: Mapping[str, BaseVariant]) -> bool:
+def _has_customized_fingerprint(event: Event, variants: dict[str, BaseVariant]) -> bool:
fingerprint = event.data.get("fingerprint", [])
if "{{ default }}" in fingerprint:
@@ -96,22 +135,14 @@ def _has_customized_fingerprint(event: Event, variants: Mapping[str, BaseVariant
# Hybrid fingerprinting ({{ default }} + some other value(s))
else:
- metrics.incr(
- "grouping.similarity.did_call_seer",
- sample_rate=options.get("seer.similarity.metrics_sample_rate"),
- tags={"call_made": False, "blocker": "hybrid-fingerprint"},
- )
+ record_did_call_seer_metric(call_made=False, blocker="hybrid-fingerprint")
return True
# Fully customized fingerprint (from either us or the user)
fingerprint_variant = variants.get("custom_fingerprint") or variants.get("built_in_fingerprint")
if fingerprint_variant:
- metrics.incr(
- "grouping.similarity.did_call_seer",
- sample_rate=options.get("seer.similarity.metrics_sample_rate"),
- tags={"call_made": False, "blocker": fingerprint_variant.type},
- )
+ record_did_call_seer_metric(call_made=False, blocker=fingerprint_variant.type)
return True
return False
@@ -133,12 +164,7 @@ def _ratelimiting_enabled(event: Event, project: Project) -> bool:
if ratelimiter.backend.is_limited("seer:similarity:global-limit", **global_ratelimit):
logger_extra["limit_per_sec"] = global_limit_per_sec
logger.warning("should_call_seer_for_grouping.global_ratelimit_hit", extra=logger_extra)
-
- metrics.incr(
- "grouping.similarity.did_call_seer",
- sample_rate=options.get("seer.similarity.metrics_sample_rate"),
- tags={"call_made": False, "blocker": "global-rate-limit"},
- )
+ record_did_call_seer_metric(call_made=False, blocker="global-rate-limit")
return True
@@ -147,12 +173,7 @@ def _ratelimiting_enabled(event: Event, project: Project) -> bool:
):
logger_extra["limit_per_sec"] = project_limit_per_sec
logger.warning("should_call_seer_for_grouping.project_ratelimit_hit", extra=logger_extra)
-
- metrics.incr(
- "grouping.similarity.did_call_seer",
- sample_rate=options.get("seer.similarity.metrics_sample_rate"),
- tags={"call_made": False, "blocker": "project-rate-limit"},
- )
+ record_did_call_seer_metric(call_made=False, blocker="project-rate-limit")
return True
@@ -173,29 +194,18 @@ def _circuit_breaker_broken(event: Event, project: Project) -> bool:
**breaker_config,
},
)
- metrics.incr(
- "grouping.similarity.did_call_seer",
- sample_rate=options.get("seer.similarity.metrics_sample_rate"),
- tags={"call_made": False, "blocker": "circuit-breaker"},
- )
+ record_did_call_seer_metric(call_made=False, blocker="circuit-breaker")
return circuit_broken
-def _has_empty_stacktrace_string(event: Event, variants: Mapping[str, BaseVariant]) -> bool:
+def _has_empty_stacktrace_string(event: Event, variants: dict[str, BaseVariant]) -> bool:
stacktrace_string = get_stacktrace_string_with_metrics(
get_grouping_info_from_variants(variants), event.platform, ReferrerOptions.INGEST
)
if not stacktrace_string:
if stacktrace_string == "":
- metrics.incr(
- "grouping.similarity.did_call_seer",
- sample_rate=options.get("seer.similarity.metrics_sample_rate"),
- tags={
- "call_made": False,
- "blocker": "empty-stacktrace-string",
- },
- )
+ record_did_call_seer_metric(call_made=False, blocker="empty-stacktrace-string")
return True
# Store the stacktrace string in the event so we only calculate it once. We need to pop it
# later so it isn't stored in the database.
@@ -205,7 +215,7 @@ def _has_empty_stacktrace_string(event: Event, variants: Mapping[str, BaseVarian
def get_seer_similar_issues(
event: Event,
- variants: Mapping[str, BaseVariant],
+ variants: dict[str, BaseVariant],
num_neighbors: int = 1,
) -> tuple[dict[str, Any], GroupHash | None]:
"""
@@ -281,16 +291,12 @@ def get_seer_similar_issues(
def maybe_check_seer_for_matching_grouphash(
- event: Event, variants: Mapping[str, BaseVariant], all_grouphashes: list[GroupHash]
+ event: Event, variants: dict[str, BaseVariant], all_grouphashes: list[GroupHash]
) -> GroupHash | None:
seer_matched_grouphash = None
if should_call_seer_for_grouping(event, variants):
- metrics.incr(
- "grouping.similarity.did_call_seer",
- sample_rate=options.get("seer.similarity.metrics_sample_rate"),
- tags={"call_made": True, "blocker": "none"},
- )
+ record_did_call_seer_metric(call_made=True, blocker="none")
try:
# If no matching group is found in Seer, we'll still get back result
diff --git a/src/sentry/grouping/parameterization.py b/src/sentry/grouping/parameterization.py
index 49309f7d4ca91c..412b254ccc6ac2 100644
--- a/src/sentry/grouping/parameterization.py
+++ b/src/sentry/grouping/parameterization.py
@@ -20,19 +20,19 @@
@dataclasses.dataclass
class ParameterizationRegex:
- name: str # name of the pattern also used as group name in combined regex
+ name: str # name of the pattern (also used as group name in combined regex)
raw_pattern: str # regex pattern w/o matching group name
lookbehind: str | None = None # positive lookbehind prefix if needed
lookahead: str | None = None # positive lookahead postfix if needed
counter: int = 0
- # These need to be used with `(?x)` tells the regex compiler to ignore comments
+ # These need to be used with `(?x)`, to tell the regex compiler to ignore comments
# and unescaped whitespace, so we can use newlines and indentation for better legibility.
@property
def pattern(self) -> str:
"""
- Returns the regex pattern for with as a named matching group and lookbehind/lookahead if needed.
+ Returns the regex pattern with a named matching group and lookbehind/lookahead if needed.
"""
prefix = rf"(?<={self.lookbehind})" if self.lookbehind else ""
postfix = rf"(?={self.lookahead})" if self.lookahead else ""
@@ -41,7 +41,7 @@ def pattern(self) -> str:
@property
def compiled_pattern(self) -> re.Pattern[str]:
"""
- Returns the compiled regex pattern for with as a named matching group and lookbehind/lookahead if needed.
+ Returns the compiled regex pattern with a named matching group and lookbehind/lookahead if needed.
"""
if not hasattr(self, "_compiled_pattern"):
self._compiled_pattern = re.compile(rf"(?x){self.pattern}")
@@ -189,8 +189,8 @@ class ParameterizationCallable:
us more flexibility than just using regex.
"""
- name: str # name of the pattern also used as group name in combined regex
- apply: Callable[[str], tuple[str, int]] # function to modifying the input string
+ name: str # name of the pattern (also used as group name in combined regex)
+ apply: Callable[[str], tuple[str, int]] # function for modifying the input string
counter: int = 0
diff --git a/src/sentry/grouping/strategies/base.py b/src/sentry/grouping/strategies/base.py
index 8157f6232d364a..a974391cdc60ba 100644
--- a/src/sentry/grouping/strategies/base.py
+++ b/src/sentry/grouping/strategies/base.py
@@ -1,6 +1,6 @@
import inspect
from collections.abc import Callable, Iterator, Sequence
-from typing import Any, Generic, Protocol, TypeVar, overload
+from typing import Any, Generic, Protocol, Self, TypeVar, overload
from sentry import projectoptions
from sentry.eventstore.models import Event
@@ -62,7 +62,7 @@ def strategy(
"""
Registers a strategy
- :param ids: The strategy/delegate IDs with which to register
+ :param ids: The strategy/delegate IDs to register
:param interface: Which interface type should be dispatched to this strategy
:param score: Determines precedence of strategies. For example exception
strategy scores higher than message strategy, so if both interfaces are
@@ -90,6 +90,7 @@ def decorator(f: StrategyFunc[ConcreteInterface]) -> Strategy[ConcreteInterface]
class GroupingContext:
def __init__(self, strategy_config: "StrategyConfiguration"):
+ # The initial context is essentially the grouping config options
self._stack = [strategy_config.initial_context]
self.config = strategy_config
self.push()
@@ -99,12 +100,13 @@ def __setitem__(self, key: str, value: ContextValue) -> None:
self._stack[-1][key] = value
def __getitem__(self, key: str) -> ContextValue:
+ # Walk down the stack from the top and return the first instance of `key` found
for d in reversed(self._stack):
if key in d:
return d[key]
raise KeyError(key)
- def __enter__(self) -> "GroupingContext":
+ def __enter__(self) -> Self:
self.push()
return self
@@ -142,7 +144,7 @@ def get_single_grouping_component(
def get_single_grouping_component(
self, interface: Interface, *, event: Event, **kwargs: Any
- ) -> BaseGroupingComponent:
+ ) -> FrameGroupingComponent | ExceptionGroupingComponent | StacktraceGroupingComponent:
"""Invokes a delegate grouping strategy. If no such delegate is
configured a fallback grouping component is returned.
"""
@@ -200,9 +202,9 @@ def __repr__(self) -> str:
def _invoke(
self, func: Callable[..., ReturnedVariants], *args: Any, **kwargs: Any
) -> ReturnedVariants:
- # We forcefully override strategy here. This lets a strategy
+ # We forcefully override strategy here. This lets a strategy
# function always access its metadata and directly forward it to
- # subcomponents without having to filter out strategy.
+ # subcomponents.
kwargs["strategy"] = self
return func(*args, **kwargs)
@@ -218,7 +220,7 @@ def variant_processor(self, func: VariantProcessor) -> VariantProcessor:
def get_grouping_component(
self, event: Event, context: GroupingContext, variant: str | None = None
- ) -> None | BaseGroupingComponent | ReturnedVariants:
+ ) -> None | BaseGroupingComponent[Any] | ReturnedVariants:
"""Given a specific variant this calculates the grouping component."""
args = []
iface = event.interfaces.get(self.interface_name)
@@ -244,54 +246,33 @@ def get_grouping_components(self, event: Event, context: GroupingContext) -> Ret
assert isinstance(components_by_variant, dict)
final_components_by_variant = {}
- has_mandatory_hashes = False
- mandatory_contributing_variants_by_hash = {}
- optional_contributing_variants = []
- prevent_contribution = None
+ priority_contributing_variants_by_hash = {}
+ non_priority_contributing_variants = []
for variant_name, component in components_by_variant.items():
- is_mandatory = variant_name.startswith("!")
+ is_priority = variant_name.startswith("!")
variant_name = variant_name.lstrip("!")
- if is_mandatory:
- has_mandatory_hashes = True
-
if component.contributes:
- if is_mandatory:
- mandatory_contributing_variants_by_hash[component.get_hash()] = variant_name
+ # Track priority and non-priority contributing hashes separately, so the latter can
+ # be deduped against the former
+ if is_priority:
+ priority_contributing_variants_by_hash[component.get_hash()] = variant_name
else:
- optional_contributing_variants.append(variant_name)
+ non_priority_contributing_variants.append(variant_name)
final_components_by_variant[variant_name] = component
- prevent_contribution = has_mandatory_hashes and not mandatory_contributing_variants_by_hash
-
- for variant_name in optional_contributing_variants:
+ # Mark any non-priority duplicates of priority hashes as non-contributing
+ for variant_name in non_priority_contributing_variants:
component = final_components_by_variant[variant_name]
-
- # In case this variant contributes we need to check two things
- # here: if we did not have a system match we need to prevent
- # it from contributing. Additionally if it matches the system
- # component we also do not want the variant to contribute but
- # with a different message.
- if prevent_contribution:
+ hash_value = component.get_hash()
+ duplicate_of = priority_contributing_variants_by_hash.get(hash_value)
+ if duplicate_of is not None:
component.update(
contributes=False,
- hint="ignored because %s variant is not used"
- % (
- list(mandatory_contributing_variants_by_hash.values())[0]
- if len(mandatory_contributing_variants_by_hash) == 1
- else "other mandatory"
- ),
+ hint="ignored because hash matches %s variant" % duplicate_of,
)
- else:
- hash_value = component.get_hash()
- duplicate_of = mandatory_contributing_variants_by_hash.get(hash_value)
- if duplicate_of is not None:
- component.update(
- contributes=False,
- hint="ignored because hash matches %s variant" % duplicate_of,
- )
if self.variant_processor_func is not None:
final_components_by_variant = self._invoke(
diff --git a/src/sentry/grouping/strategies/legacy.py b/src/sentry/grouping/strategies/legacy.py
index d1c302f5f3a375..d1cd5232fef73c 100644
--- a/src/sentry/grouping/strategies/legacy.py
+++ b/src/sentry/grouping/strategies/legacy.py
@@ -111,9 +111,9 @@ def is_recursion_legacy(frame1: Frame, frame2: Frame) -> bool:
def remove_module_outliers_legacy(module: str, platform: str) -> tuple[str, str | None]:
"""Remove things that augment the module but really should not."""
if platform == "java":
- if module[:35] == "sun.reflect.GeneratedMethodAccessor":
+ if module.startswith("sun.reflect.GeneratedMethodAccessor"):
return "sun.reflect.GeneratedMethodAccessor", "removed reflection marker"
- if module[:44] == "jdk.internal.reflect.GeneratedMethodAccessor":
+ if module.startswith("jdk.internal.reflect.GeneratedMethodAccessor"):
return "jdk.internal.reflect.GeneratedMethodAccessor", "removed reflection marker"
old_module = module
module = _java_reflect_enhancer_re.sub(r"\1", module)
@@ -447,7 +447,7 @@ def stacktrace_legacy(
frames_for_filtering.append(frame.get_raw_data())
prev_frame = frame
- stacktrace_component, _ = context.config.enhancements.assemble_stacktrace_component(
+ stacktrace_component = context.config.enhancements.assemble_stacktrace_component(
frame_components, frames_for_filtering, event.platform
)
stacktrace_component.update(contributes=contributes, hint=hint)
diff --git a/src/sentry/grouping/strategies/newstyle.py b/src/sentry/grouping/strategies/newstyle.py
index bd2dc77655d8ae..d32174de0bf638 100644
--- a/src/sentry/grouping/strategies/newstyle.py
+++ b/src/sentry/grouping/strategies/newstyle.py
@@ -152,7 +152,6 @@ def get_filename_component(
new_filename = _java_assist_enhancer_re.sub(r"\1", filename)
if new_filename != filename:
filename_component.update(values=[new_filename], hint="cleaned javassist parts")
- filename = new_filename
return filename_component
@@ -176,11 +175,11 @@ def get_module_component(
elif platform == "java":
if "$$Lambda$" in module:
module_component.update(contributes=False, hint="ignored java lambda")
- if module[:35] == "sun.reflect.GeneratedMethodAccessor":
+ if module.startswith("sun.reflect.GeneratedMethodAccessor"):
module_component.update(
values=["sun.reflect.GeneratedMethodAccessor"], hint="removed reflection marker"
)
- elif module[:44] == "jdk.internal.reflect.GeneratedMethodAccessor":
+ elif module.startswith("jdk.internal.reflect.GeneratedMethodAccessor"):
module_component.update(
values=["jdk.internal.reflect.GeneratedMethodAccessor"],
hint="removed reflection marker",
@@ -351,8 +350,9 @@ def frame(
if context["javascript_fuzzing"] and get_behavior_family_for_platform(platform) == "javascript":
func = frame.raw_function or frame.function
if func:
+ # Strip leading namespacing, i.e., turn `some.module.path.someFunction` into
+ # `someFunction` and `someObject.someMethod` into `someMethod`
func = func.rsplit(".", 1)[-1]
- # special case empty functions not to have a hint
if not func:
function_component.update(contributes=False)
elif func in (
@@ -412,7 +412,7 @@ def stacktrace(
return call_with_variants(
_single_stacktrace_variant,
- ["!system", "app"],
+ ["!app", "system"],
interface,
event=event,
context=context,
@@ -463,7 +463,7 @@ def _single_stacktrace_variant(
contributes=False, hint="ignored single non-URL JavaScript frame"
)
- stacktrace_component, _ = context.config.enhancements.assemble_stacktrace_component(
+ stacktrace_component = context.config.enhancements.assemble_stacktrace_component(
frame_components,
frames_for_filtering,
event.platform,
@@ -518,15 +518,9 @@ def single_exception(
if exception.mechanism:
if exception.mechanism.synthetic:
- # Ignore synthetic exceptions as they are produced from platform
- # specific error codes.
- #
- # For example there can be crashes with EXC_ACCESS_VIOLATION_* on Windows with
- # the same exact stacktrace as a crash with EXC_BAD_ACCESS on macOS.
- #
- # Do not update type component of system variant, such that regex
- # can be continuously modified without unnecessarily creating new
- # groups.
+ # Ignore the error type for synthetic exceptions as it can vary by platform and doesn't
+ # actually carry any meaning with respect to what went wrong. (Synthetic exceptions
+ are dummy exceptions created by the SDK in order to harvest a stacktrace.)
type_component.update(contributes=False, hint="ignored because exception is synthetic")
system_type_component.update(
contributes=False, hint="ignored because exception is synthetic"
@@ -549,7 +543,7 @@ def single_exception(
)
else:
stacktrace_components_by_variant = {
- "app": StacktraceGroupingComponent(),
+ "!app": StacktraceGroupingComponent(),
}
exception_components_by_variant = {}
@@ -615,7 +609,7 @@ def chained_exception(
# Get all the exceptions to consider.
all_exceptions = interface.exceptions()
- # Get the grouping components for all exceptions up front, as we'll need them in a few places and only want to compute them once.
+ # For each exception, create a dictionary of grouping components by variant name
exception_components_by_exception = {
id(exception): context.get_grouping_components_by_variant(exception, event=event, **meta)
for exception in all_exceptions
@@ -638,12 +632,16 @@ def chained_exception(
if main_exception_id:
event.data["main_exception_id"] = main_exception_id
- # Case 1: we have a single exception, use the single exception
- # component directly to avoid a level of nesting
+ # Cases 1 and 2: Either this never was a chained exception (this is our entry point for single
+ # exceptions, too), or this is a chained exception consisting solely of an exception group and a
+ # single inner exception. In the former case, all we have is the single exception component, so
+ return it. In the latter case, there's no value-add to the wrapper, so discard it and just
+ # return the component for the inner exception.
if len(exceptions) == 1:
return exception_components_by_exception[id(exceptions[0])]
- # Case 2: produce a component for each chained exception
+ # Case 3: This is either a chained exception or an exception group containing at least two inner
+ # exceptions. Either way, we need to wrap our exception components in a chained exception component.
exception_components_by_variant: dict[str, list[ExceptionGroupingComponent]] = {}
for exception in exceptions:
@@ -670,7 +668,7 @@ def chained_exception(
# See https://github.com/getsentry/rfcs/blob/main/text/0079-exception-groups.md#sentry-issue-grouping
def filter_exceptions_for_exception_groups(
exceptions: list[SingleException],
- exception_components: dict[int, ReturnedVariants],
+ exception_components: dict[int, dict[str, ExceptionGroupingComponent]],
event: Event,
) -> list[SingleException]:
# This function only filters exceptions if there are at least two exceptions.
@@ -710,8 +708,8 @@ def get_child_exceptions(exception: SingleException) -> list[SingleException]:
node = exception_tree.get(exception_id)
return node.children if node else []
- # This recursive generator gets the "top-level exceptions", and is used below.
- # "Top-level exceptions are those that are the first descendants of the root that are not exception groups.
+ # This recursive generator gets the "top-level exceptions," and is used below.
+ # Top-level exceptions are those that are the first descendants of the root that are not exception groups.
# For examples, see https://github.com/getsentry/rfcs/blob/main/text/0079-exception-groups.md#sentry-issue-grouping
def get_top_level_exceptions(
exception: SingleException,
@@ -753,8 +751,8 @@ def get_first_path(exception: SingleException) -> Generator[SingleException]:
# If there's only one distinct top-level exception in the group,
# use it and its first-path children, but throw out the exception group and any copies.
# For example, Group<['Da', 'Da', 'Da']> should just be treated as a single 'Da'.
- # We'll also set the main_exception_id, which is used in the extract_metadata function
- # in src/sentry/eventtypes/error.py - which will ensure the issue is titled by this
+ # We'll also set `main_exception_id`, which is used in the `extract_metadata` function
+ # in `src/sentry/eventtypes/error.py`, in order to ensure the issue is titled by this
# item rather than the exception group.
if len(distinct_top_level_exceptions) == 1:
main_exception = distinct_top_level_exceptions[0]
@@ -826,7 +824,7 @@ def _filtered_threads(
stacktrace, event=event, **meta
).items():
thread_components_by_variant[variant_name] = ThreadsGroupingComponent(
- values=[stacktrace_component]
+ values=[stacktrace_component], frame_counts=stacktrace_component.frame_counts
)
return thread_components_by_variant
diff --git a/src/sentry/grouping/strategies/utils.py b/src/sentry/grouping/strategies/utils.py
index 68d12d6a4101cc..0aace20dbdf108 100644
--- a/src/sentry/grouping/strategies/utils.py
+++ b/src/sentry/grouping/strategies/utils.py
@@ -12,8 +12,8 @@ def remove_non_stacktrace_variants(variants: ReturnedVariants) -> ReturnedVarian
non_contributing_components = []
stacktrace_variants = set()
- # In case any of the variants has a contributing stacktrace, we want
- # to make all other variants non contributing.
+ # If at least one variant has a contributing stacktrace, we want to mark all variants without a
+ # stacktrace as non-contributing.
for variant_name, component in variants.items():
stacktrace_iter = component.iter_subcomponents(
id="stacktrace", recursive=True, only_contributing=True
diff --git a/src/sentry/grouping/utils.py b/src/sentry/grouping/utils.py
index f94a6fe16399a4..b5952f68a828c0 100644
--- a/src/sentry/grouping/utils.py
+++ b/src/sentry/grouping/utils.py
@@ -1,32 +1,65 @@
+from __future__ import annotations
+
import re
+from collections.abc import Iterable, Mapping
from hashlib import md5
+from re import Match
+from typing import TYPE_CHECKING, Any, Literal
+from uuid import UUID
from django.utils.encoding import force_bytes
+from sentry.db.models.fields.node import NodeData
from sentry.stacktraces.processing import get_crash_frame_from_event_data
from sentry.utils.safe import get_path
+if TYPE_CHECKING:
+ from sentry.grouping.component import ExceptionGroupingComponent
+
+
_fingerprint_var_re = re.compile(r"\{\{\s*(\S+)\s*\}\}")
-def parse_fingerprint_var(value):
+def parse_fingerprint_var(value: str) -> str | None:
match = _fingerprint_var_re.match(value)
if match is not None and match.end() == len(value):
return match.group(1)
+ return None
-def is_default_fingerprint_var(value):
+def is_default_fingerprint_var(value: str) -> bool:
return parse_fingerprint_var(value) == "default"
-def hash_from_values(values):
+def hash_from_values(values: Iterable[str | int | UUID | ExceptionGroupingComponent]) -> str:
+ """
+ Primarily used at the end of the grouping process, to get a final hash value once the all of the
+ variants have been constructed, but also used as a hack to compare exception components (by
+ stringifying their reprs) when calculating variants for chained exceptions.
+ """
result = md5()
for value in values:
result.update(force_bytes(value, errors="replace"))
return result.hexdigest()
-def bool_from_string(value):
+def get_fingerprint_type(fingerprint: list[str]) -> Literal["default", "hybrid", "custom"]:
+ return (
+ "default"
+ if len(fingerprint) == 1 and is_default_fingerprint_var(fingerprint[0])
+ else (
+ "hybrid"
+ if any(is_default_fingerprint_var(entry) for entry in fingerprint)
+ else "custom"
+ )
+ )
+
+
+def bool_from_string(value: str) -> bool | None:
+ """
+ Convert various string representations of boolean values ("1", "yes", "true", "0", "no",
+ "false") into actual booleans. Return `None` for all other inputs.
+ """
if value:
value = value.lower()
if value in ("1", "yes", "true"):
@@ -34,8 +67,10 @@ def bool_from_string(value):
elif value in ("0", "no", "false"):
return False
+ return None
+
-def get_fingerprint_value(var, data):
+def get_fingerprint_value(var: str, data: NodeData | Mapping[str, Any]) -> str | None:
if var == "transaction":
return data.get("transaction") or ""
elif var == "message":
@@ -78,15 +113,18 @@ def get_fingerprint_value(var, data):
elif var == "logger":
return data.get("logger") or ""
elif var.startswith("tags."):
+ # Turn "tags.some_tag" into just "some_tag"
tag = var[5:]
for t, value in data.get("tags") or ():
if t == tag:
return value
return "" % tag
+ else:
+ return None
-def resolve_fingerprint_values(values, event_data):
- def _get_fingerprint_value(value):
+def resolve_fingerprint_values(values: list[str], event_data: NodeData) -> list[str]:
+ def _get_fingerprint_value(value: str) -> str:
var = parse_fingerprint_var(value)
if var is None:
return value
@@ -98,8 +136,8 @@ def _get_fingerprint_value(value):
return [_get_fingerprint_value(x) for x in values]
-def expand_title_template(template, event_data):
- def _handle_match(match):
+def expand_title_template(template: str, event_data: Mapping[str, Any]) -> str:
+ def _handle_match(match: Match[str]) -> str:
var = match.group(1)
rv = get_fingerprint_value(var, event_data)
if rv is not None:
diff --git a/src/sentry/grouping/variants.py b/src/sentry/grouping/variants.py
index 9be16481967921..f25322e66ade9b 100644
--- a/src/sentry/grouping/variants.py
+++ b/src/sentry/grouping/variants.py
@@ -1,10 +1,12 @@
from __future__ import annotations
from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING, NotRequired, TypedDict
+from collections.abc import Mapping
+from typing import TYPE_CHECKING, Any, NotRequired, Self, TypedDict
from sentry.grouping.component import (
AppGroupingComponent,
+ ContributingComponent,
DefaultGroupingComponent,
SystemGroupingComponent,
)
@@ -24,8 +26,11 @@ class FingerprintVariantMetadata(TypedDict):
class BaseVariant(ABC):
- # This is true if `get_hash` does not return `None`.
- contributes = True
+ variant_name: str | None = None
+
+ @property
+ def contributes(self) -> bool:
+ return True
@property
@abstractmethod
@@ -35,18 +40,20 @@ def get_hash(self) -> str | None:
return None
@property
- def description(self):
+ def description(self) -> str:
return self.type
- def _get_metadata_as_dict(self):
+ # This has to return `Mapping` rather than `dict` so that subtypes can override the return value
+ # with a TypedDict if they choose. See https://github.com/python/mypy/issues/4976.
+ def _get_metadata_as_dict(self) -> Mapping[str, Any]:
return {}
- def as_dict(self):
+ def as_dict(self) -> dict[str, Any]:
rv = {"type": self.type, "description": self.description, "hash": self.get_hash()}
rv.update(self._get_metadata_as_dict())
return rv
- def __repr__(self):
+ def __repr__(self) -> str:
return f"<{self.__class__.__name__} {self.get_hash()!r} ({self.type})>"
def __eq__(self, other: object) -> bool:
@@ -70,7 +77,7 @@ def __init__(self, checksum: str):
def get_hash(self) -> str | None:
return self.checksum
- def _get_metadata_as_dict(self):
+ def _get_metadata_as_dict(self) -> Mapping[str, str]:
return {"checksum": self.checksum}
@@ -82,7 +89,7 @@ def __init__(self, checksum: str, raw_checksum: str):
self.checksum = checksum
self.raw_checksum = raw_checksum
- def _get_metadata_as_dict(self):
+ def _get_metadata_as_dict(self) -> Mapping[str, str]:
return {"checksum": self.checksum, "raw_checksum": self.raw_checksum}
@@ -109,14 +116,14 @@ class PerformanceProblemVariant(BaseVariant):
description = "performance problem"
contributes = True
- def __init__(self, event_performance_problem):
+ def __init__(self, event_performance_problem: Any):
self.event_performance_problem = event_performance_problem
self.problem = event_performance_problem.problem
def get_hash(self) -> str | None:
return self.problem.fingerprint
- def _get_metadata_as_dict(self):
+ def _get_metadata_as_dict(self) -> Mapping[str, Any]:
problem_data = self.problem.to_dict()
evidence_hashes = self.event_performance_problem.evidence_hashes
@@ -124,35 +131,40 @@ def _get_metadata_as_dict(self):
class ComponentVariant(BaseVariant):
- """A component variant is a variant that produces a hash from the
- `BaseGroupingComponent` it encloses.
- """
+ """A variant that produces a hash from the `BaseGroupingComponent` it encloses."""
type = "component"
def __init__(
self,
+ # The root of the component tree
component: AppGroupingComponent | SystemGroupingComponent | DefaultGroupingComponent,
+ # The highest non-root contributing component in the tree, representing the overall grouping
+ # method (exception, threads, message, etc.). For non-contributing variants, this will be
+ # None.
+ contributing_component: ContributingComponent | None,
strategy_config: StrategyConfiguration,
):
self.component = component
self.config = strategy_config
+ self.contributing_component = contributing_component
+ self.variant_name = self.component.id # "app", "system", or "default"
@property
- def description(self):
+ def description(self) -> str:
return self.component.description
@property
- def contributes(self):
+ def contributes(self) -> bool:
return self.component.contributes
def get_hash(self) -> str | None:
return self.component.get_hash()
- def _get_metadata_as_dict(self):
+ def _get_metadata_as_dict(self) -> Mapping[str, Any]:
return {"component": self.component.as_dict(), "config": self.config.as_dict()}
- def __repr__(self):
+ def __repr__(self) -> str:
return super().__repr__() + f" contributes={self.contributes} ({self.description})"
@@ -190,7 +202,7 @@ def __init__(self, fingerprint: list[str], fingerprint_info: FingerprintInfo):
self.info = fingerprint_info
@property
- def description(self):
+ def description(self) -> str:
return "custom fingerprint"
def get_hash(self) -> str | None:
@@ -201,12 +213,12 @@ def _get_metadata_as_dict(self) -> FingerprintVariantMetadata:
class BuiltInFingerprintVariant(CustomFingerprintVariant):
- """A built-in, Sentry defined fingerprint."""
+ """A built-in, Sentry-defined fingerprint."""
type = "built_in_fingerprint"
@property
- def description(self):
+ def description(self) -> str:
return "Sentry defined fingerprint"
@@ -215,19 +227,39 @@ class SaltedComponentVariant(ComponentVariant):
type = "salted_component"
+ @classmethod
+ def from_component_variant(
+ cls,
+ component_variant: ComponentVariant,
+ fingerprint: list[str],
+ fingerprint_info: FingerprintInfo,
+ ) -> Self:
+ return cls(
+ fingerprint=fingerprint,
+ component=component_variant.component,
+ contributing_component=component_variant.contributing_component,
+ strategy_config=component_variant.config,
+ fingerprint_info=fingerprint_info,
+ )
+
def __init__(
self,
fingerprint: list[str],
+ # The root of the component tree
component: AppGroupingComponent | SystemGroupingComponent | DefaultGroupingComponent,
+ # The highest non-root contributing component in the tree, representing the overall grouping
+ # method (exception, threads, message, etc.). For non-contributing variants, this will be
+ # None.
+ contributing_component: ContributingComponent | None,
strategy_config: StrategyConfiguration,
fingerprint_info: FingerprintInfo,
):
- ComponentVariant.__init__(self, component, strategy_config)
+ ComponentVariant.__init__(self, component, contributing_component, strategy_config)
self.values = fingerprint
self.info = fingerprint_info
@property
- def description(self):
+ def description(self) -> str:
return "modified " + self.component.description
def get_hash(self) -> str | None:
@@ -235,16 +267,19 @@ def get_hash(self) -> str | None:
return None
final_values: list[str | int] = []
for value in self.values:
+ # If we've hit the `{{ default }}` part of the fingerprint, pull in values from the
+ # original grouping method (message, stacktrace, etc.)
if is_default_fingerprint_var(value):
final_values.extend(self.component.iter_values())
else:
final_values.append(value)
return hash_from_values(final_values)
- def _get_metadata_as_dict(self):
- rv = ComponentVariant._get_metadata_as_dict(self)
- rv.update(expose_fingerprint_dict(self.values, self.info))
- return rv
+ def _get_metadata_as_dict(self) -> Mapping[str, Any]:
+ return {
+ **ComponentVariant._get_metadata_as_dict(self),
+ **expose_fingerprint_dict(self.values, self.info),
+ }
class VariantsByDescriptor(TypedDict, total=False):
diff --git a/src/sentry/hybridcloud/models/apikeyreplica.py b/src/sentry/hybridcloud/models/apikeyreplica.py
index a13a967f492918..7538aae8ad5a54 100644
--- a/src/sentry/hybridcloud/models/apikeyreplica.py
+++ b/src/sentry/hybridcloud/models/apikeyreplica.py
@@ -35,6 +35,9 @@ class Meta:
__repr__ = sane_repr("organization_id", "key")
+ def __str__(self) -> str:
+ return f"replica_id={self.id}, status={self.status}"
+
@property
def entity_id(self) -> int:
return self.apikey_id
diff --git a/src/sentry/hybridcloud/models/apitokenreplica.py b/src/sentry/hybridcloud/models/apitokenreplica.py
index d9ee2be13040dc..c9f4402379f536 100644
--- a/src/sentry/hybridcloud/models/apitokenreplica.py
+++ b/src/sentry/hybridcloud/models/apitokenreplica.py
@@ -39,7 +39,7 @@ class Meta:
__repr__ = sane_repr("user_id", "token", "application_id")
def __str__(self) -> str:
- return force_str(self.token)
+ return f"replica_token_id={self.id}, token_id={force_str(self.apitoken_id)}"
@property
def entity_id(self) -> int:
diff --git a/src/sentry/incidents/endpoints/validators.py b/src/sentry/incidents/endpoints/validators.py
index 382484a21e361c..de938853f8b568 100644
--- a/src/sentry/incidents/endpoints/validators.py
+++ b/src/sentry/incidents/endpoints/validators.py
@@ -89,7 +89,6 @@ class MetricAlertComparisonConditionValidator(NumericComparisonConditionValidato
supported_conditions = frozenset((Condition.GREATER, Condition.LESS))
supported_results = frozenset((DetectorPriorityLevel.HIGH, DetectorPriorityLevel.MEDIUM))
- type = "metric_alert"
class MetricAlertsDetectorValidator(BaseGroupTypeDetectorValidator):
diff --git a/src/sentry/incidents/grouptype.py b/src/sentry/incidents/grouptype.py
index b7078c7c2f7a40..1d0873ddb8588c 100644
--- a/src/sentry/incidents/grouptype.py
+++ b/src/sentry/incidents/grouptype.py
@@ -1,15 +1,68 @@
+from __future__ import annotations
+
from dataclasses import dataclass
+from datetime import UTC, datetime
+from typing import Any
+from uuid import uuid4
+from sentry import features
from sentry.incidents.endpoints.validators import MetricAlertsDetectorValidator
from sentry.incidents.utils.types import QuerySubscriptionUpdate
from sentry.issues.grouptype import GroupCategory, GroupType
+from sentry.issues.issue_occurrence import IssueOccurrence
+from sentry.models.organization import Organization
from sentry.ratelimits.sliding_windows import Quota
from sentry.types.group import PriorityLevel
from sentry.workflow_engine.handlers.detector import StatefulDetectorHandler
+from sentry.workflow_engine.models.data_source import DataPacket
+from sentry.workflow_engine.types import DetectorGroupKey
class MetricAlertDetectorHandler(StatefulDetectorHandler[QuerySubscriptionUpdate]):
- pass
+ def build_occurrence_and_event_data(
+ self, group_key: DetectorGroupKey, value: int, new_status: PriorityLevel
+ ) -> tuple[IssueOccurrence, dict[str, Any]]:
+ # Returning a placeholder for now, this may require us passing more info
+
+ occurrence = IssueOccurrence(
+ id=str(uuid4()),
+ project_id=self.detector.project_id,
+ event_id=str(uuid4()),
+ fingerprint=self.build_fingerprint(group_key),
+ issue_title="Some Issue",
+ subtitle="Some subtitle",
+ resource_id=None,
+ evidence_data={"detector_id": self.detector.id, "value": value},
+ evidence_display=[],
+ type=MetricAlertFire,
+ detection_time=datetime.now(UTC),
+ level="error",
+ culprit="Some culprit",
+ initial_issue_priority=new_status.value,
+ )
+ event_data = {
+ "timestamp": occurrence.detection_time,
+ "project_id": occurrence.project_id,
+ "event_id": occurrence.event_id,
+ "platform": "python",
+ "received": occurrence.detection_time,
+ "tags": {},
+ }
+ return occurrence, event_data
+
+ @property
+ def counter_names(self) -> list[str]:
+ # Placeholder for now, this should be a list of counters that we want to update as we go above warning / critical
+ return []
+
+ def get_dedupe_value(self, data_packet: DataPacket[QuerySubscriptionUpdate]) -> int:
+ return int(data_packet.packet.get("timestamp", datetime.now(UTC)).timestamp())
+
+ def get_group_key_values(
+ self, data_packet: DataPacket[QuerySubscriptionUpdate]
+ ) -> dict[DetectorGroupKey, int]:
+ # This is for testing purposes, we'll need to update the values inspected.
+ return {None: data_packet.packet["values"]["foo"]}
# Example GroupType and detector handler for metric alerts. We don't create these issues yet, but we'll use something
@@ -26,3 +79,8 @@ class MetricAlertFire(GroupType):
enable_escalation_detection = False
detector_handler = MetricAlertDetectorHandler
detector_validator = MetricAlertsDetectorValidator
+ detector_config_schema = {} # TODO(colleen): update this
+
+ @classmethod
+ def allow_post_process_group(cls, organization: Organization) -> bool:
+ return features.has("organizations:workflow-engine-metric-alert-processing", organization)
diff --git a/src/sentry/incidents/subscription_processor.py b/src/sentry/incidents/subscription_processor.py
index 4654948204437a..88953fd133def5 100644
--- a/src/sentry/incidents/subscription_processor.py
+++ b/src/sentry/incidents/subscription_processor.py
@@ -43,8 +43,16 @@
)
from sentry.incidents.tasks import handle_trigger_action
from sentry.incidents.utils.metric_issue_poc import create_or_update_metric_issue
-from sentry.incidents.utils.types import QuerySubscriptionUpdate
+from sentry.incidents.utils.process_update_helpers import (
+ get_aggregation_value_helper,
+ get_crash_rate_alert_metrics_aggregation_value_helper,
+)
+from sentry.incidents.utils.types import (
+ DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION,
+ QuerySubscriptionUpdate,
+)
from sentry.models.project import Project
+from sentry.search.eap.utils import add_start_end_conditions
from sentry.seer.anomaly_detection.get_anomaly_data import get_anomaly_data_from_seer
from sentry.seer.anomaly_detection.utils import anomaly_has_confidence, has_anomaly
from sentry.snuba.dataset import Dataset
@@ -53,10 +61,12 @@
get_entity_key_from_query_builder,
get_entity_subscription_from_snuba_query,
)
-from sentry.snuba.models import QuerySubscription
+from sentry.snuba.models import QuerySubscription, SnubaQuery
from sentry.snuba.subscriptions import delete_snuba_subscription
-from sentry.utils import metrics, redis
+from sentry.utils import metrics, redis, snuba_rpc
from sentry.utils.dates import to_datetime
+from sentry.workflow_engine.models import DataPacket
+from sentry.workflow_engine.processors.data_packet import process_data_packets
logger = logging.getLogger(__name__)
REDIS_TTL = int(timedelta(days=7).total_seconds())
@@ -221,41 +231,80 @@ def get_comparison_aggregation_value(
snuba_query,
self.subscription.project.organization_id,
)
- try:
- project_ids = [self.subscription.project_id]
- # TODO: determine whether we need to include the subscription query_extra here
- query_builder = entity_subscription.build_query_builder(
- query=snuba_query.query,
- project_ids=project_ids,
- environment=snuba_query.environment,
- params={
- "organization_id": self.subscription.project.organization.id,
- "project_id": project_ids,
- "start": start,
- "end": end,
- },
- )
- time_col = ENTITY_TIME_COLUMNS[get_entity_key_from_query_builder(query_builder)]
- query_builder.add_conditions(
- [
- Condition(Column(time_col), Op.GTE, start),
- Condition(Column(time_col), Op.LT, end),
- ]
- )
- query_builder.limit = Limit(1)
- results = query_builder.run_query(referrer="subscription_processor.comparison_query")
- comparison_aggregate = list(results["data"][0].values())[0]
+ dataset = Dataset(snuba_query.dataset)
+ query_type = SnubaQuery.Type(snuba_query.type)
+ project_ids = [self.subscription.project_id]
+
+ comparison_aggregate: None | float = None
+ if query_type == SnubaQuery.Type.PERFORMANCE and dataset == Dataset.EventsAnalyticsPlatform:
+ try:
+ rpc_time_series_request = entity_subscription.build_rpc_request(
+ query=snuba_query.query,
+ project_ids=project_ids,
+ environment=snuba_query.environment,
+ params={
+ "organization_id": self.subscription.project.organization.id,
+ "project_id": project_ids,
+ },
+ referrer="subscription_processor.comparison_query",
+ )
- except Exception:
- logger.exception(
- "Failed to run comparison query",
- extra={
- "alert_rule_id": self.alert_rule.id,
- "subscription_id": subscription_update.get("subscription_id"),
- "organization_id": self.alert_rule.organization_id,
- },
- )
- return None
+ rpc_time_series_request = add_start_end_conditions(
+ rpc_time_series_request, start, end
+ )
+
+ rpc_response = snuba_rpc.timeseries_rpc(rpc_time_series_request)
+ if len(rpc_response.result_timeseries):
+ comparison_aggregate = rpc_response.result_timeseries[0].data_points[0].data
+
+ except Exception:
+ logger.exception(
+ "Failed to run RPC comparison query",
+ extra={
+ "alert_rule_id": self.alert_rule.id,
+ "subscription_id": subscription_update.get("subscription_id"),
+ "organization_id": self.alert_rule.organization_id,
+ },
+ )
+ return None
+
+ else:
+ try:
+ # TODO: determine whether we need to include the subscription query_extra here
+ query_builder = entity_subscription.build_query_builder(
+ query=snuba_query.query,
+ project_ids=project_ids,
+ environment=snuba_query.environment,
+ params={
+ "organization_id": self.subscription.project.organization.id,
+ "project_id": project_ids,
+ "start": start,
+ "end": end,
+ },
+ )
+ time_col = ENTITY_TIME_COLUMNS[get_entity_key_from_query_builder(query_builder)]
+ query_builder.add_conditions(
+ [
+ Condition(Column(time_col), Op.GTE, start),
+ Condition(Column(time_col), Op.LT, end),
+ ]
+ )
+ query_builder.limit = Limit(1)
+ results = query_builder.run_query(
+ referrer="subscription_processor.comparison_query"
+ )
+ comparison_aggregate = list(results["data"][0].values())[0]
+
+ except Exception:
+ logger.exception(
+ "Failed to run comparison query",
+ extra={
+ "alert_rule_id": self.alert_rule.id,
+ "subscription_id": subscription_update.get("subscription_id"),
+ "organization_id": self.alert_rule.organization_id,
+ },
+ )
+ return None
if not comparison_aggregate:
metrics.incr("incidents.alert_rules.skipping_update_comparison_value_invalid")
@@ -282,24 +331,12 @@ def get_crash_rate_alert_metrics_aggregation_value(
count is just ignored
- `crashed` represents the total sessions or user counts that crashed.
"""
- row = subscription_update["values"]["data"][0]
- total_session_count = row.get("count", 0)
- crash_count = row.get("crashed", 0)
-
- if total_session_count == 0:
+ # NOTE (mifu67): we create this helper because we also use it in the new detector processing flow
+ aggregation_value = get_crash_rate_alert_metrics_aggregation_value_helper(
+ subscription_update
+ )
+ if aggregation_value is None:
self.reset_trigger_counts()
- metrics.incr("incidents.alert_rules.ignore_update_no_session_data")
- return None
-
- if CRASH_RATE_ALERT_MINIMUM_THRESHOLD is not None:
- min_threshold = int(CRASH_RATE_ALERT_MINIMUM_THRESHOLD)
- if total_session_count < min_threshold:
- self.reset_trigger_counts()
- metrics.incr("incidents.alert_rules.ignore_update_count_lower_than_min_threshold")
- return None
-
- aggregation_value: int = round((1 - crash_count / total_session_count) * 100, 3)
-
return aggregation_value
def get_aggregation_value(self, subscription_update: QuerySubscriptionUpdate) -> float | None:
@@ -308,14 +345,8 @@ def get_aggregation_value(self, subscription_update: QuerySubscriptionUpdate) ->
subscription_update
)
else:
- aggregation_value = list(subscription_update["values"]["data"][0].values())[0]
- # In some cases Snuba can return a None value for an aggregation. This means
- # there were no rows present when we made the query for certain types of aggregations
- # like avg. Defaulting this to 0 for now. It might turn out that we'd prefer to skip
- # the update in the future.
- if aggregation_value is None:
- aggregation_value = 0
-
+ # NOTE (mifu67): we create this helper because we also use it in the new detector processing flow
+ aggregation_value = get_aggregation_value_helper(subscription_update)
if self.alert_rule.comparison_delta:
aggregation_value = self.get_comparison_aggregation_value(
subscription_update, aggregation_value
@@ -358,6 +389,15 @@ def process_update(self, subscription_update: QuerySubscriptionUpdate) -> None:
metrics.incr("incidents.alert_rules.skipping_already_processed_update")
return
+ if features.has(
+ "organizations:workflow-engine-metric-alert-processing",
+ self.subscription.project.organization,
+ ):
+ data_packet = DataPacket[QuerySubscriptionUpdate](
+ query_id=self.subscription.id, packet=subscription_update
+ )
+ process_data_packets([data_packet], DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION)
+
self.last_update = subscription_update["timestamp"]
if (
diff --git a/src/sentry/incidents/utils/process_update_helpers.py b/src/sentry/incidents/utils/process_update_helpers.py
new file mode 100644
index 00000000000000..74b6526eb5218e
--- /dev/null
+++ b/src/sentry/incidents/utils/process_update_helpers.py
@@ -0,0 +1,60 @@
+from sentry.incidents.utils.types import QuerySubscriptionUpdate
+from sentry.utils import metrics
+
+"""
+We pull these methods out of the subscription processor to be used by the
+workflow engine data condition handlers.
+"""
+
+# NOTE (mifu67): this is set to None in the subscription processor code and doesn't
+# seem to be used. Maybe we don't need the logic gated by it?
+CRASH_RATE_ALERT_MINIMUM_THRESHOLD: int | None = None
+
+
+def get_crash_rate_alert_metrics_aggregation_value_helper(
+ subscription_update: QuerySubscriptionUpdate,
+) -> float | None:
+ """
+ Handles validation and extraction of Crash Rate Alerts subscription updates values over
+ metrics dataset.
+ The subscription update looks like
+ [
+ {'project_id': 8, 'tags[5]': 6, 'count': 2.0, 'crashed': 1.0}
+ ]
+ - `count` represents sessions or users sessions that were started, hence to get the crash
+ free percentage, we would need to divide number of crashed sessions by that number,
+ and subtract that value from 1. This is also used when CRASH_RATE_ALERT_MINIMUM_THRESHOLD is
+ set in the sense that if the minimum threshold is greater than the session count,
+ then the update is dropped. If the minimum threshold is not set then the total sessions
+ count is just ignored
+ - `crashed` represents the total sessions or user counts that crashed.
+ """
+ row = subscription_update["values"]["data"][0]
+ total_session_count = row.get("count", 0)
+ crash_count = row.get("crashed", 0)
+
+ if total_session_count == 0:
+ metrics.incr("incidents.alert_rules.ignore_update_no_session_data")
+ return None
+
+ if CRASH_RATE_ALERT_MINIMUM_THRESHOLD is not None:
+ min_threshold = int(CRASH_RATE_ALERT_MINIMUM_THRESHOLD)
+ if total_session_count < min_threshold:
+ metrics.incr("incidents.alert_rules.ignore_update_count_lower_than_min_threshold")
+ return None
+
+    aggregation_value: float = round((1 - crash_count / total_session_count) * 100, 3)
+
+ return aggregation_value
+
+
+def get_aggregation_value_helper(subscription_update: QuerySubscriptionUpdate) -> float:
+ aggregation_value = list(subscription_update["values"]["data"][0].values())[0]
+ # In some cases Snuba can return a None value for an aggregation. This means
+ # there were no rows present when we made the query for certain types of aggregations
+ # like avg. Defaulting this to 0 for now. It might turn out that we'd prefer to skip
+ # the update in the future.
+ if aggregation_value is None:
+ aggregation_value = 0
+
+ return aggregation_value
diff --git a/src/sentry/incidents/utils/types.py b/src/sentry/incidents/utils/types.py
index 4be43160dc8a73..572598108b39e5 100644
--- a/src/sentry/incidents/utils/types.py
+++ b/src/sentry/incidents/utils/types.py
@@ -13,3 +13,6 @@ class QuerySubscriptionUpdate(TypedDict):
class AlertRuleActivationConditionType(Enum):
RELEASE_CREATION = 0
DEPLOY_CREATION = 1
+
+
+DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION = "snuba_query_subscription"
diff --git a/src/sentry/ingest/billing_metrics_consumer.py b/src/sentry/ingest/billing_metrics_consumer.py
index 471855aa356f6d..8009d57a6cdf9e 100644
--- a/src/sentry/ingest/billing_metrics_consumer.py
+++ b/src/sentry/ingest/billing_metrics_consumer.py
@@ -16,13 +16,7 @@
from sentry.constants import DataCategory
from sentry.models.project import Project
-from sentry.sentry_metrics.indexer.strings import (
- SHARED_TAG_STRINGS,
- SPAN_METRICS_NAMES,
- TRANSACTION_METRICS_NAMES,
-)
-from sentry.sentry_metrics.use_case_id_registry import UseCaseID
-from sentry.sentry_metrics.utils import reverse_resolve_tag_value
+from sentry.sentry_metrics.indexer.strings import SPAN_METRICS_NAMES, TRANSACTION_METRICS_NAMES
from sentry.signals import first_custom_metric_received
from sentry.snuba.metrics import parse_mri
from sentry.snuba.metrics.naming_layer.mri import is_custom_metric
@@ -48,9 +42,11 @@ def create_with_partitions(
class BillingTxCountMetricConsumerStrategy(ProcessingStrategy[KafkaPayload]):
- """A metrics consumer that generates a billing outcome for each processed
- transaction, processing a bucket at a time. The transaction count is
- directly taken from the `c:transactions/usage@none` counter metric.
+ """A metrics consumer that generates an accepted outcome for each processed (as opposed to indexed)
+ transaction or span, processing a bucket at a time. The transaction / span count is
+ directly taken from the `c:transactions/usage@none` or `c:spans/usage@none` counter metric.
+
+ See https://develop.sentry.dev/application-architecture/dynamic-sampling/outcomes/.
"""
#: The IDs of the metrics used to count transactions or spans
@@ -58,7 +54,6 @@ class BillingTxCountMetricConsumerStrategy(ProcessingStrategy[KafkaPayload]):
TRANSACTION_METRICS_NAMES["c:transactions/usage@none"]: DataCategory.TRANSACTION,
SPAN_METRICS_NAMES["c:spans/usage@none"]: DataCategory.SPAN,
}
- profile_tag_key = str(SHARED_TAG_STRINGS["has_profile"])
def __init__(self, next_step: ProcessingStrategy[Any]) -> None:
self.__next_step = next_step
@@ -79,7 +74,7 @@ def submit(self, message: Message[KafkaPayload]) -> None:
payload = self._get_payload(message)
- self._produce_billing_outcomes(payload)
+ self._produce_outcomes(payload)
self._flag_metric_received_for_project(payload)
self.__next_step.submit(message)
@@ -106,25 +101,16 @@ def _count_processed_items(self, generic_metric: GenericMetric) -> Mapping[DataC
return items
- def _has_profile(self, generic_metric: GenericMetric) -> bool:
- return bool(
- (tag_value := generic_metric["tags"].get(self.profile_tag_key))
- and "true"
- == reverse_resolve_tag_value(
- UseCaseID.TRANSACTIONS, generic_metric["org_id"], tag_value
- )
- )
-
- def _produce_billing_outcomes(self, generic_metric: GenericMetric) -> None:
+ def _produce_outcomes(self, generic_metric: GenericMetric) -> None:
for category, quantity in self._count_processed_items(generic_metric).items():
- self._produce_billing_outcome(
+ self._produce_accepted_outcome(
org_id=generic_metric["org_id"],
project_id=generic_metric["project_id"],
category=category,
quantity=quantity,
)
- def _produce_billing_outcome(
+ def _produce_accepted_outcome(
self, *, org_id: int, project_id: int, category: DataCategory, quantity: int
) -> None:
if quantity < 1:
diff --git a/src/sentry/integrations/api/bases/doc_integrations.py b/src/sentry/integrations/api/bases/doc_integrations.py
index a6141fbe3f1285..c7349143d9ce6e 100644
--- a/src/sentry/integrations/api/bases/doc_integrations.py
+++ b/src/sentry/integrations/api/bases/doc_integrations.py
@@ -68,7 +68,7 @@ class DocIntegrationsBaseEndpoint(Endpoint):
permission_classes = (DocIntegrationsAndStaffPermission,)
def generate_incoming_metadata(self, request: Request) -> Any:
- return {k: v for k, v in request.json_body.items() if k in METADATA_PROPERTIES}
+ return {k: v for k, v in request.data.items() if k in METADATA_PROPERTIES}
class DocIntegrationBaseEndpoint(DocIntegrationsBaseEndpoint):
diff --git a/src/sentry/integrations/api/endpoints/doc_integration_details.py b/src/sentry/integrations/api/endpoints/doc_integration_details.py
index e6974c13876c38..6d93dbde58634f 100644
--- a/src/sentry/integrations/api/endpoints/doc_integration_details.py
+++ b/src/sentry/integrations/api/endpoints/doc_integration_details.py
@@ -31,7 +31,7 @@ def get(self, request: Request, doc_integration: DocIntegration) -> Response:
return self.respond(serialize(doc_integration, request.user), status=status.HTTP_200_OK)
def put(self, request: Request, doc_integration: DocIntegration) -> Response:
- data = request.json_body
+ data = request.data
data["metadata"] = self.generate_incoming_metadata(request)
serializer = DocIntegrationSerializer(doc_integration, data=data)
diff --git a/src/sentry/integrations/api/endpoints/doc_integrations_index.py b/src/sentry/integrations/api/endpoints/doc_integrations_index.py
index 869079209a13c1..e392a8eba0f7c8 100644
--- a/src/sentry/integrations/api/endpoints/doc_integrations_index.py
+++ b/src/sentry/integrations/api/endpoints/doc_integrations_index.py
@@ -42,7 +42,7 @@ def get(self, request: Request):
def post(self, request: Request):
# Override any incoming JSON for these fields
- data = request.json_body
+ data = request.data
data["is_draft"] = True
data["metadata"] = self.generate_incoming_metadata(request)
serializer = DocIntegrationSerializer(data=data)
diff --git a/src/sentry/integrations/bitbucket/client.py b/src/sentry/integrations/bitbucket/client.py
index 897e309ab7b308..09fee5b8305870 100644
--- a/src/sentry/integrations/bitbucket/client.py
+++ b/src/sentry/integrations/bitbucket/client.py
@@ -89,9 +89,6 @@ def finalize_request(self, prepared_request: PreparedRequest) -> PreparedRequest
def get_issue(self, repo, issue_id):
return self.get(BitbucketAPIPath.issue.format(repo=repo, issue_id=issue_id))
- def get_issues(self, repo):
- return self.get(BitbucketAPIPath.issues.format(repo=repo))
-
def create_issue(self, repo, data):
return self.post(path=BitbucketAPIPath.issues.format(repo=repo), data=data)
diff --git a/src/sentry/integrations/bitbucket/webhook.py b/src/sentry/integrations/bitbucket/webhook.py
index 2460e7aca6c8c5..e03d4b632d38c9 100644
--- a/src/sentry/integrations/bitbucket/webhook.py
+++ b/src/sentry/integrations/bitbucket/webhook.py
@@ -1,6 +1,6 @@
import ipaddress
import logging
-from abc import ABC, abstractmethod
+from abc import ABC
from collections.abc import Mapping
from datetime import timezone
from typing import Any
@@ -17,6 +17,7 @@
from sentry.api.base import Endpoint, region_silo_endpoint
from sentry.integrations.base import IntegrationDomain
from sentry.integrations.bitbucket.constants import BITBUCKET_IP_RANGES, BITBUCKET_IPS
+from sentry.integrations.source_code_management.webhook import SCMWebhook
from sentry.integrations.utils.metrics import IntegrationWebhookEvent, IntegrationWebhookEventType
from sentry.models.commit import Commit
from sentry.models.commitauthor import CommitAuthor
@@ -30,17 +31,12 @@
PROVIDER_NAME = "integrations:bitbucket"
-class Webhook(ABC):
+class BitbucketWebhook(SCMWebhook, ABC):
@property
- @abstractmethod
- def event_type(self) -> IntegrationWebhookEventType:
- raise NotImplementedError
-
- @abstractmethod
- def __call__(self, organization: Organization, event: Mapping[str, Any]):
- raise NotImplementedError
+ def provider(self) -> str:
+ return "bitbucket"
- def update_repo_data(self, repo, event):
+ def update_repo_data(self, repo: Repository, event: Mapping[str, Any]) -> None:
"""
Given a webhook payload, update stored repo data if needed.
@@ -68,16 +64,19 @@ def update_repo_data(self, repo, event):
)
-class PushEventWebhook(Webhook):
+class PushEventWebhook(BitbucketWebhook):
# https://confluence.atlassian.com/bitbucket/event-payloads-740262817.html#EventPayloads-Push
@property
def event_type(self) -> IntegrationWebhookEventType:
return IntegrationWebhookEventType.PUSH
- def __call__(self, organization: Organization, event: Mapping[str, Any]):
+ def __call__(self, event: Mapping[str, Any], **kwargs) -> None:
authors = {}
+ if not (organization := kwargs.get("organization")):
+ raise ValueError("Missing organization")
+
try:
repo = Repository.objects.get(
organization_id=organization.id,
@@ -131,9 +130,9 @@ class BitbucketWebhookEndpoint(Endpoint):
"POST": ApiPublishStatus.PRIVATE,
}
permission_classes = ()
- _handlers: dict[str, type[Webhook]] = {"repo:push": PushEventWebhook}
+ _handlers: dict[str, type[BitbucketWebhook]] = {"repo:push": PushEventWebhook}
- def get_handler(self, event_type) -> type[Webhook] | None:
+ def get_handler(self, event_type) -> type[BitbucketWebhook] | None:
return self._handlers.get(event_type)
@method_decorator(csrf_exempt)
@@ -205,8 +204,8 @@ def post(self, request: HttpRequest, organization_id: int) -> HttpResponse:
with IntegrationWebhookEvent(
interaction_type=event_handler.event_type,
domain=IntegrationDomain.SOURCE_CODE_MANAGEMENT,
- provider_key="bitbucket",
+ provider_key=event_handler.provider,
).capture():
- event_handler(organization, event)
+ event_handler(event, organization=organization)
return HttpResponse(status=204)
diff --git a/src/sentry/integrations/bitbucket_server/webhook.py b/src/sentry/integrations/bitbucket_server/webhook.py
index f18c226c43a36d..0fdca807dd97a3 100644
--- a/src/sentry/integrations/bitbucket_server/webhook.py
+++ b/src/sentry/integrations/bitbucket_server/webhook.py
@@ -1,5 +1,5 @@
import logging
-from abc import ABC, abstractmethod
+from abc import ABC
from collections.abc import Mapping
from datetime import datetime, timezone
from typing import Any
@@ -7,14 +7,18 @@
import orjson
import sentry_sdk
from django.db import IntegrityError, router, transaction
-from django.http import HttpRequest, HttpResponse
+from django.http import Http404, HttpRequest, HttpResponse
from django.http.response import HttpResponseBase
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
-from django.views.generic.base import View
+from sentry.api.api_owners import ApiOwner
+from sentry.api.api_publish_status import ApiPublishStatus
+from sentry.api.base import Endpoint
+from sentry.api.exceptions import BadRequest
from sentry.integrations.base import IntegrationDomain
from sentry.integrations.models.integration import Integration
+from sentry.integrations.source_code_management.webhook import SCMWebhook
from sentry.integrations.utils.metrics import IntegrationWebhookEvent, IntegrationWebhookEventType
from sentry.models.commit import Commit
from sentry.models.commitauthor import CommitAuthor
@@ -29,15 +33,10 @@
PROVIDER_NAME = "integrations:bitbucket_server"
-class Webhook(ABC):
+class BitbucketServerWebhook(SCMWebhook, ABC):
@property
- @abstractmethod
- def event_type(self) -> IntegrationWebhookEventType:
- raise NotImplementedError
-
- @abstractmethod
- def __call__(self, organization: Organization, integration_id: int, event: Mapping[str, Any]):
- raise NotImplementedError
+    def provider(self) -> str:
+ return "bitbucket_server"
def update_repo_data(self, repo, event):
"""
@@ -49,16 +48,20 @@ def update_repo_data(self, repo, event):
repo.update(name=name_from_event, config=dict(repo.config, name=name_from_event))
-class PushEventWebhook(Webhook):
+class PushEventWebhook(BitbucketServerWebhook):
@property
def event_type(self) -> IntegrationWebhookEventType:
return IntegrationWebhookEventType.PUSH
- def __call__(
- self, organization: Organization, integration_id: int, event: Mapping[str, Any]
- ) -> HttpResponse:
+ def __call__(self, event: Mapping[str, Any], **kwargs) -> None:
authors = {}
+ if not (
+ (organization := kwargs.get("organization"))
+ and (integration_id := kwargs.get("integration_id"))
+ ):
+ raise ValueError("Organization and integration_id must be provided")
+
try:
repo = Repository.objects.get(
organization_id=organization.id,
@@ -66,18 +69,18 @@ def __call__(
external_id=str(event["repository"]["id"]),
)
except Repository.DoesNotExist:
- return HttpResponse(status=404)
+ raise Http404()
provider = repo.get_provider()
try:
installation = provider.get_installation(integration_id, organization.id)
except Integration.DoesNotExist:
- return HttpResponse(status=404)
+ raise Http404()
try:
client = installation.get_client()
except IntegrationError:
- return HttpResponse(status=400)
+ raise BadRequest()
# while we're here, make sure repo data is up to date
self.update_repo_data(repo, event)
@@ -91,12 +94,12 @@ def __call__(
project_name, repo_name, from_hash, change.get("toHash")
)
except ApiHostError:
- return HttpResponse(status=409)
+ raise BadRequest(detail="Unable to reach host")
except ApiUnauthorized:
- return HttpResponse(status=400)
+ raise BadRequest()
except Exception as e:
sentry_sdk.capture_exception(e)
- return HttpResponse(status=400)
+ raise
for commit in commits:
if IntegrationRepositoryProvider.should_ignore_commit(commit["message"]):
@@ -131,14 +134,19 @@ def __call__(
except IntegrityError:
pass
- return HttpResponse(status=204)
-
@region_silo_view
-class BitbucketServerWebhookEndpoint(View):
- _handlers: dict[str, type[Webhook]] = {"repo:refs_changed": PushEventWebhook}
+class BitbucketServerWebhookEndpoint(Endpoint):
+ authentication_classes = ()
+ permission_classes = ()
+ owner = ApiOwner.ECOSYSTEM
+ publish_status = {
+ "POST": ApiPublishStatus.PRIVATE,
+ }
+
+ _handlers: dict[str, type[BitbucketServerWebhook]] = {"repo:refs_changed": PushEventWebhook}
- def get_handler(self, event_type) -> type[Webhook] | None:
+ def get_handler(self, event_type) -> type[BitbucketServerWebhook] | None:
return self._handlers.get(event_type)
@method_decorator(csrf_exempt)
@@ -150,7 +158,7 @@ def dispatch(self, request: HttpRequest, *args, **kwargs) -> HttpResponseBase:
def post(self, request: HttpRequest, organization_id, integration_id) -> HttpResponseBase:
try:
- organization = Organization.objects.get_from_cache(id=organization_id)
+ organization: Organization = Organization.objects.get_from_cache(id=organization_id)
except Organization.DoesNotExist:
logger.exception(
"%s.webhook.invalid-organization",
@@ -194,6 +202,8 @@ def post(self, request: HttpRequest, organization_id, integration_id) -> HttpRes
with IntegrationWebhookEvent(
interaction_type=event_handler.event_type,
domain=IntegrationDomain.SOURCE_CODE_MANAGEMENT,
- provider_key="bitbucket-server",
+ provider_key=event_handler.provider,
).capture():
- return event_handler(organization, integration_id, event)
+ event_handler(event, organization=organization, integration_id=integration_id)
+
+ return HttpResponse(status=204)
diff --git a/src/sentry/integrations/discord/actions/issue_alert/notification.py b/src/sentry/integrations/discord/actions/issue_alert/notification.py
index 27e3ac45a8f2be..66122003d18b73 100644
--- a/src/sentry/integrations/discord/actions/issue_alert/notification.py
+++ b/src/sentry/integrations/discord/actions/issue_alert/notification.py
@@ -5,6 +5,11 @@
from sentry.integrations.discord.actions.issue_alert.form import DiscordNotifyServiceForm
from sentry.integrations.discord.client import DiscordClient
from sentry.integrations.discord.message_builder.issues import DiscordIssuesMessageBuilder
+from sentry.integrations.discord.spec import DiscordMessagingSpec
+from sentry.integrations.messaging.metrics import (
+ MessagingInteractionEvent,
+ MessagingInteractionType,
+)
from sentry.rules.actions import IntegrationEventAction
from sentry.rules.base import CallbackFuture
from sentry.types.rules import RuleFuture
@@ -46,19 +51,27 @@ def send_notification(event: GroupEvent, futures: Sequence[RuleFuture]) -> None:
message = DiscordIssuesMessageBuilder(event.group, event=event, tags=tags, rules=rules)
client = DiscordClient()
- try:
- client.send_message(channel_id, message, notification_uuid=notification_uuid)
- except Exception as e:
- self.logger.error(
- "discord.notification.message_send_failure",
- extra={
- "error": str(e),
- "project_id": event.project_id,
- "event_id": event.event_id,
- "guild_id": integration.external_id,
- "channel_id": channel_id,
- },
- )
+ with MessagingInteractionEvent(
+ interaction_type=MessagingInteractionType.SEND_ISSUE_ALERT_NOTIFICATION,
+ spec=DiscordMessagingSpec(),
+ ).capture() as lifecycle:
+ try:
+ lifecycle.add_extras({"integration_id": integration.id, "channel": channel_id})
+ client.send_message(channel_id, message, notification_uuid=notification_uuid)
+ except Exception as e:
+ # TODO(iamrajjoshi): Update some of these failures to halts
+ lifecycle.record_failure(e)
+ # TODO(iamrajjoshi): Remove the logger after we audit lifecycle
+ self.logger.error(
+ "discord.notification.message_send_failure",
+ extra={
+ "error": str(e),
+ "project_id": event.project_id,
+ "event_id": event.event_id,
+ "guild_id": integration.external_id,
+ "channel_id": channel_id,
+ },
+ )
rule = rules[0] if rules else None
self.record_notification_sent(event, channel_id, rule, notification_uuid)
diff --git a/src/sentry/integrations/discord/actions/metric_alert.py b/src/sentry/integrations/discord/actions/metric_alert.py
index beff2896743e6f..e2e6f757389b52 100644
--- a/src/sentry/integrations/discord/actions/metric_alert.py
+++ b/src/sentry/integrations/discord/actions/metric_alert.py
@@ -10,6 +10,11 @@
from sentry.integrations.discord.message_builder.metric_alerts import (
DiscordMetricAlertMessageBuilder,
)
+from sentry.integrations.discord.spec import DiscordMessagingSpec
+from sentry.integrations.messaging.metrics import (
+ MessagingInteractionEvent,
+ MessagingInteractionType,
+)
from ..utils import logger
@@ -51,13 +56,19 @@ def send_incident_alert_notification(
)
client = DiscordClient()
- try:
- client.send_message(channel, message)
- except Exception as error:
- logger.warning(
- "discord.metric_alert.message_send_failure",
- extra={"error": error, "incident_id": incident.id, "channel_id": channel},
- )
- return False
- else:
+ with MessagingInteractionEvent(
+ interaction_type=MessagingInteractionType.SEND_INCIDENT_ALERT_NOTIFICATION,
+ spec=DiscordMessagingSpec(),
+ ).capture() as lifecycle:
+ try:
+ client.send_message(channel, message)
+ except Exception as error:
+ # TODO(iamrajjoshi): Update some of these failures to halts
+ lifecycle.record_failure(error)
+ # TODO(iamrajjoshi): Remove the logger after we audit lifecycle
+ logger.warning(
+ "discord.metric_alert.message_send_failure",
+ extra={"error": error, "incident_id": incident.id, "channel_id": channel},
+ )
+ return False
return True
diff --git a/src/sentry/integrations/discord/views/link_identity.py b/src/sentry/integrations/discord/views/link_identity.py
index 2ceb879317c181..968fcdc96aec35 100644
--- a/src/sentry/integrations/discord/views/link_identity.py
+++ b/src/sentry/integrations/discord/views/link_identity.py
@@ -26,7 +26,13 @@ class DiscordLinkIdentityView(DiscordIdentityLinkageView, LinkIdentityView):
def get_success_template_and_context(
self, params: Mapping[str, Any], integration: Integration | None
) -> tuple[str, dict[str, Any]]:
- return "sentry/integrations/discord/linked.html", {}
+ if integration is None:
+ raise ValueError(
+ 'integration is required for linking (params must include "integration_id")'
+ )
+ return "sentry/integrations/discord/linked.html", {
+ "guild_id": integration.external_id,
+ }
@property
def analytics_operation_key(self) -> str | None:
diff --git a/src/sentry/integrations/discord/webhooks/message_component.py b/src/sentry/integrations/discord/webhooks/message_component.py
index 39d58be7d61ae2..04f4691617166c 100644
--- a/src/sentry/integrations/discord/webhooks/message_component.py
+++ b/src/sentry/integrations/discord/webhooks/message_component.py
@@ -247,13 +247,7 @@ def update_group(self, data: Mapping[str, object]) -> None:
status=data,
)
update_groups(
- request=self.request.request,
- group_ids=[self.group.id],
- projects=[self.group.project],
- organization_id=self.group.organization.id,
- search_fn=None,
- user=self.user,
- data=data,
+ request=self.request.request, groups=[self.group], user=self.user, data=data
)
diff --git a/src/sentry/integrations/github/client.py b/src/sentry/integrations/github/client.py
index 98c189d50bbbfd..b1f831e9b42c8b 100644
--- a/src/sentry/integrations/github/client.py
+++ b/src/sentry/integrations/github/client.py
@@ -141,7 +141,7 @@ def _get_token(self, prepared_request: PreparedRequest) -> str | None:
access_token: str | None = self.integration.metadata.get("access_token")
expires_at: str | None = self.integration.metadata.get("expires_at")
is_expired = (
- bool(expires_at) and datetime.strptime(cast(str, expires_at), "%Y-%m-%dT%H:%M:%S") < now
+ bool(expires_at) and datetime.fromisoformat(expires_at).replace(tzinfo=None) < now
)
should_refresh = not access_token or not expires_at or is_expired
@@ -587,10 +587,6 @@ def get_with_pagination(
page_number += 1
return output
- def get_issues(self, repo: str) -> Sequence[Any]:
- issues: Sequence[Any] = self.get(f"/repos/{repo}/issues")
- return issues
-
def search_issues(self, query: str) -> Mapping[str, Sequence[Mapping[str, Any]]]:
"""
https://docs.github.com/en/rest/search?#search-issues-and-pull-requests
diff --git a/src/sentry/integrations/github/issues.py b/src/sentry/integrations/github/issues.py
index cdd510da75cdaf..cc803cb9b40984 100644
--- a/src/sentry/integrations/github/issues.py
+++ b/src/sentry/integrations/github/issues.py
@@ -287,17 +287,6 @@ def get_allowed_assignees(self, repo: str) -> Sequence[tuple[str, str]]:
return (("", "Unassigned"),) + users
- def get_repo_issues(self, repo: str) -> Sequence[tuple[str, str]]:
- client = self.get_client()
- try:
- response = client.get_issues(repo)
- except Exception as e:
- self.raise_error(e)
-
- issues = tuple((i["number"], "#{} {}".format(i["number"], i["title"])) for i in response)
-
- return issues
-
def get_repo_labels(self, repo: str) -> Sequence[tuple[str, str]]:
client = self.get_client()
try:
diff --git a/src/sentry/integrations/github/webhook.py b/src/sentry/integrations/github/webhook.py
index d5066d0e908673..24cd6fdfe02f9d 100644
--- a/src/sentry/integrations/github/webhook.py
+++ b/src/sentry/integrations/github/webhook.py
@@ -26,12 +26,10 @@
from sentry.integrations.base import IntegrationDomain
from sentry.integrations.github.tasks.open_pr_comment import open_pr_comment_workflow
from sentry.integrations.pipeline import ensure_integration
-from sentry.integrations.services.integration.model import (
- RpcIntegration,
- RpcOrganizationIntegration,
-)
+from sentry.integrations.services.integration.model import RpcIntegration
from sentry.integrations.services.integration.service import integration_service
from sentry.integrations.services.repository.service import repository_service
+from sentry.integrations.source_code_management.webhook import SCMWebhook
from sentry.integrations.utils.metrics import IntegrationWebhookEvent, IntegrationWebhookEventType
from sentry.integrations.utils.scope import clear_tags_and_context
from sentry.models.commit import Commit
@@ -73,31 +71,21 @@ def get_file_language(filename: str) -> str | None:
return language
-class Webhook(ABC):
+class GitHubWebhook(SCMWebhook, ABC):
"""
Base class for GitHub webhooks handled in region silos.
"""
- provider = "github"
-
@property
- @abstractmethod
- def event_type(self) -> IntegrationWebhookEventType:
- raise NotImplementedError
+ def provider(self) -> str:
+ return "github"
@abstractmethod
- def _handle(
- self,
- integration: RpcIntegration,
- event: Mapping[str, Any],
- organization: Organization,
- repo: Repository,
- host: str | None = None,
- ) -> None:
- raise NotImplementedError
+ def _handle(self, integration: RpcIntegration, event: Mapping[str, Any], **kwargs) -> None:
+ pass
- def __call__(self, event: Mapping[str, Any], host: str | None = None) -> None:
- external_id = get_github_external_id(event=event, host=host)
+ def __call__(self, event: Mapping[str, Any], **kwargs) -> None:
+ external_id = get_github_external_id(event=event, host=kwargs.get("host"))
result = integration_service.organization_contexts(
external_id=external_id, provider=self.provider
@@ -166,7 +154,12 @@ def __call__(self, event: Mapping[str, Any], host: str | None = None) -> None:
for repo in repos.exclude(status=ObjectStatus.HIDDEN):
self.update_repo_data(repo, event)
- self._handle(integration, event, orgs[repo.organization_id], repo)
+ self._handle(
+ integration=integration,
+ event=event,
+ organization=orgs[repo.organization_id],
+ repo=repo,
+ )
def update_repo_data(self, repo: Repository, event: Mapping[str, Any]) -> None:
"""
@@ -208,21 +201,28 @@ def update_repo_data(self, repo: Repository, event: Mapping[str, Any]) -> None:
)
pass
+ def is_anonymous_email(self, email: str) -> bool:
+ return email[-25:] == "@users.noreply.github.com"
+
+ def get_external_id(self, username: str) -> str:
+ return f"github:{username}"
-class InstallationEventWebhook:
+ def get_idp_external_id(self, integration: RpcIntegration, host: str | None = None) -> str:
+ return options.get("github-app.id")
+
+
+class InstallationEventWebhook(GitHubWebhook):
"""
Unlike other GitHub webhooks, installation webhooks are handled in control silo.
https://developer.github.com/v3/activity/events/types/#installationevent
"""
- provider = "github"
-
@property
def event_type(self) -> IntegrationWebhookEventType:
return IntegrationWebhookEventType.INSTALLATION
- def __call__(self, event: Mapping[str, Any], host: str | None = None) -> None:
+ def __call__(self, event: Mapping[str, Any], **kwargs) -> None:
installation = event["installation"]
if not installation:
@@ -241,7 +241,7 @@ def __call__(self, event: Mapping[str, Any], host: str | None = None) -> None:
if event["action"] == "deleted":
external_id = event["installation"]["id"]
- if host:
+ if host := kwargs.get("host"):
external_id = "{}:{}".format(host, event["installation"]["id"])
result = integration_service.organization_contexts(
provider=self.provider,
@@ -251,7 +251,7 @@ def __call__(self, event: Mapping[str, Any], host: str | None = None) -> None:
org_integrations = result.organization_integrations
if integration is not None:
- self._handle_delete(event, integration, org_integrations)
+ self._handle(integration, event, org_integrations=org_integrations)
else:
# It seems possible for the GH or GHE app to be installed on their
# end, but the integration to not exist. Possibly from deleting in
@@ -267,13 +267,13 @@ def __call__(self, event: Mapping[str, Any], host: str | None = None) -> None:
)
logger.error("Installation is missing.")
- def _handle_delete(
+ def _handle(
self,
- event: Mapping[str, Any],
integration: RpcIntegration,
- org_integrations: list[RpcOrganizationIntegration],
+ event: Mapping[str, Any],
+ **kwargs,
) -> None:
- org_ids = {oi.organization_id for oi in org_integrations}
+ org_ids = {oi.organization_id for oi in kwargs.get("org_integrations", [])}
logger.info(
"InstallationEventWebhook._handle_delete",
@@ -294,22 +294,13 @@ def _handle_delete(
)
-class PushEventWebhook(Webhook):
+class PushEventWebhook(GitHubWebhook):
"""https://developer.github.com/v3/activity/events/types/#pushevent"""
@property
def event_type(self) -> IntegrationWebhookEventType:
return IntegrationWebhookEventType.PUSH
- def is_anonymous_email(self, email: str) -> bool:
- return email[-25:] == "@users.noreply.github.com"
-
- def get_external_id(self, username: str) -> str:
- return f"github:{username}"
-
- def get_idp_external_id(self, integration: RpcIntegration, host: str | None = None) -> str:
- return options.get("github-app.id")
-
def should_ignore_commit(self, commit: Mapping[str, Any]) -> bool:
return GitHubRepositoryProvider.should_ignore_commit(commit["message"])
@@ -317,11 +308,12 @@ def _handle(
self,
integration: RpcIntegration,
event: Mapping[str, Any],
- organization: Organization,
- repo: Repository,
- host: str | None = None,
+ **kwargs,
) -> None:
authors = {}
+ if not ((organization := kwargs.get("organization")) and (repo := kwargs.get("repo"))):
+ raise ValueError("Missing organization and repo")
+
client = integration.get_installation(organization_id=organization.id).get_client()
gh_username_cache: MutableMapping[str, str | None] = {}
@@ -373,7 +365,7 @@ def _handle(
"identity_ext_id": gh_user["id"],
"provider_type": self.provider,
"provider_ext_id": self.get_idp_external_id(
- integration, host
+ integration, kwargs.get("host")
),
}
)
@@ -474,29 +466,18 @@ def _handle(
repo.save()
-class PullRequestEventWebhook(Webhook):
+class PullRequestEventWebhook(GitHubWebhook):
"""https://developer.github.com/v3/activity/events/types/#pullrequestevent"""
@property
def event_type(self) -> IntegrationWebhookEventType:
return IntegrationWebhookEventType.PULL_REQUEST
- def is_anonymous_email(self, email: str) -> bool:
- return email[-25:] == "@users.noreply.github.com"
-
- def get_external_id(self, username: str) -> str:
- return f"github:{username}"
-
- def get_idp_external_id(self, integration: RpcIntegration, host: str | None = None) -> str:
- return options.get("github-app.id")
-
def _handle(
self,
integration: RpcIntegration,
event: Mapping[str, Any],
- organization: Organization,
- repo: Repository,
- host: str | None = None,
+ **kwargs,
) -> None:
pull_request = event["pull_request"]
number = pull_request["number"]
@@ -522,6 +503,10 @@ def _handle(
merge_commit_sha = pull_request["merge_commit_sha"] if pull_request["merged"] else None
author_email = "{}@localhost".format(user["login"][:65])
+
+ if not ((organization := kwargs.get("organization")) and (repo := kwargs.get("repo"))):
+ raise ValueError("Missing organization and repo")
+
try:
commit_author = CommitAuthor.objects.get(
external_id=self.get_external_id(user["login"]), organization_id=organization.id
@@ -533,7 +518,7 @@ def _handle(
filter={
"identity_ext_id": user["id"],
"provider_type": self.provider,
- "provider_ext_id": self.get_idp_external_id(integration, host),
+ "provider_ext_id": self.get_idp_external_id(integration, kwargs.get("host")),
}
)
if identity is not None:
@@ -612,13 +597,13 @@ class GitHubIntegrationsWebhookEndpoint(Endpoint):
"POST": ApiPublishStatus.PRIVATE,
}
- _handlers: dict[str, type[Webhook] | type[InstallationEventWebhook]] = {
+ _handlers: dict[str, type[GitHubWebhook]] = {
"push": PushEventWebhook,
"pull_request": PullRequestEventWebhook,
"installation": InstallationEventWebhook,
}
- def get_handler(self, event_type: str) -> type[Webhook] | type[InstallationEventWebhook] | None:
+ def get_handler(self, event_type: str) -> type[GitHubWebhook] | None:
return self._handlers.get(event_type)
def is_valid_signature(self, method: str, body: bytes, secret: str, signature: str) -> bool:
@@ -699,7 +684,7 @@ def handle(self, request: HttpRequest) -> HttpResponse:
with IntegrationWebhookEvent(
interaction_type=event_handler.event_type,
domain=IntegrationDomain.SOURCE_CODE_MANAGEMENT,
- provider_key="github",
+ provider_key=event_handler.provider,
).capture():
event_handler(event)
return HttpResponse(status=204)
diff --git a/src/sentry/integrations/github_enterprise/webhook.py b/src/sentry/integrations/github_enterprise/webhook.py
index 230ab9e069434f..8550214d531ff7 100644
--- a/src/sentry/integrations/github_enterprise/webhook.py
+++ b/src/sentry/integrations/github_enterprise/webhook.py
@@ -18,10 +18,10 @@
from sentry.constants import ObjectStatus
from sentry.integrations.base import IntegrationDomain
from sentry.integrations.github.webhook import (
+ GitHubWebhook,
InstallationEventWebhook,
PullRequestEventWebhook,
PushEventWebhook,
- Webhook,
get_github_external_id,
)
from sentry.integrations.utils.metrics import IntegrationWebhookEvent
@@ -29,8 +29,6 @@
from sentry.utils import metrics
from sentry.utils.sdk import Scope
-from .repository import GitHubEnterpriseRepositoryProvider
-
logger = logging.getLogger("sentry.webhooks")
from sentry.api.base import Endpoint, region_silo_endpoint
from sentry.integrations.services.integration import integration_service
@@ -89,16 +87,10 @@ def get_installation_metadata(event, host):
return integration.metadata["installation"]
-class GitHubEnterpriseInstallationEventWebhook(InstallationEventWebhook):
- provider = "github_enterprise"
-
-
-class GitHubEnterprisePushEventWebhook(PushEventWebhook):
- provider = "github_enterprise"
-
- # https://developer.github.com/v3/activity/events/types/#pushevent
- def is_anonymous_email(self, email: str) -> bool:
- return email[-25:] == "@users.noreply.github.com"
+class GitHubEnterpriseWebhook:
+ @property
+ def provider(self) -> str:
+ return "github_enterprise"
def get_external_id(self, username: str) -> str:
return f"github_enterprise:{username}"
@@ -106,29 +98,24 @@ def get_external_id(self, username: str) -> str:
def get_idp_external_id(self, integration: RpcIntegration, host: str | None = None) -> str:
return "{}:{}".format(host, integration.metadata["installation"]["id"])
- def should_ignore_commit(self, commit):
- return GitHubEnterpriseRepositoryProvider.should_ignore_commit(commit["message"])
+class GitHubEnterpriseInstallationEventWebhook(GitHubEnterpriseWebhook, InstallationEventWebhook):
+ pass
-class GitHubEnterprisePullRequestEventWebhook(PullRequestEventWebhook):
- provider = "github_enterprise"
- # https://developer.github.com/v3/activity/events/types/#pullrequestevent
- def is_anonymous_email(self, email: str) -> bool:
- return email[-25:] == "@users.noreply.github.com"
+class GitHubEnterprisePushEventWebhook(GitHubEnterpriseWebhook, PushEventWebhook):
+ pass
- def get_external_id(self, username: str) -> str:
- return f"github_enterprise:{username}"
- def get_idp_external_id(self, integration: RpcIntegration, host: str | None = None) -> str:
- return "{}:{}".format(host, integration.metadata["installation"]["id"])
+class GitHubEnterprisePullRequestEventWebhook(GitHubEnterpriseWebhook, PullRequestEventWebhook):
+ pass
class GitHubEnterpriseWebhookBase(Endpoint):
authentication_classes = ()
permission_classes = ()
- _handlers: dict[str, type[InstallationEventWebhook] | type[Webhook]] = {}
+ _handlers: dict[str, type[GitHubWebhook]] = {}
# https://developer.github.com/webhooks/
def get_handler(self, event_type):
@@ -163,7 +150,7 @@ def get_secret(self, event, host):
else:
return None
- def handle(self, request: HttpRequest) -> HttpResponse:
+ def _handle(self, request: HttpRequest) -> HttpResponse:
clear_tags_and_context()
scope = Scope.get_isolation_scope()
@@ -301,9 +288,9 @@ def handle(self, request: HttpRequest) -> HttpResponse:
with IntegrationWebhookEvent(
interaction_type=event_handler.event_type,
domain=IntegrationDomain.SOURCE_CODE_MANAGEMENT,
- provider_key="github-enterprise",
+ provider_key=event_handler.provider,
).capture():
- event_handler(event, host)
+ event_handler(event, host=host)
return HttpResponse(status=204)
@@ -329,4 +316,4 @@ def dispatch(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
@method_decorator(csrf_exempt)
def post(self, request: HttpRequest) -> HttpResponse:
- return self.handle(request)
+ return self._handle(request)
diff --git a/src/sentry/integrations/gitlab/webhooks.py b/src/sentry/integrations/gitlab/webhooks.py
index 6d2f3cfd0f5c5f..0a0123c842f5f0 100644
--- a/src/sentry/integrations/gitlab/webhooks.py
+++ b/src/sentry/integrations/gitlab/webhooks.py
@@ -1,7 +1,7 @@
from __future__ import annotations
import logging
-from abc import ABC, abstractmethod
+from abc import ABC
from collections.abc import Mapping
from datetime import timezone
from typing import Any
@@ -20,6 +20,7 @@
from sentry.integrations.base import IntegrationDomain
from sentry.integrations.services.integration import integration_service
from sentry.integrations.services.integration.model import RpcIntegration
+from sentry.integrations.source_code_management.webhook import SCMWebhook
from sentry.integrations.utils.metrics import IntegrationWebhookEvent, IntegrationWebhookEventType
from sentry.integrations.utils.scope import clear_tags_and_context
from sentry.models.commit import Commit
@@ -36,17 +37,38 @@
GITHUB_WEBHOOK_SECRET_INVALID_ERROR = """Gitlab's webhook secret does not match. Refresh token (or re-install the integration) by following this https://docs.sentry.io/organization/integrations/integration-platform/public-integration/#refreshing-tokens."""
-class Webhook(ABC):
+def get_gitlab_external_id(request, extra) -> tuple[str, str] | HttpResponse:
+ token = ""
+ try:
+ # Munge the token to extract the integration external_id.
+ # gitlab hook payloads don't give us enough unique context
+ # to find data on our side so we embed one in the token.
+ token = request.META["HTTP_X_GITLAB_TOKEN"]
+ # e.g. "example.gitlab.com:group-x:webhook_secret_from_sentry_integration_table"
+ instance, group_path, secret = token.split(":")
+ external_id = f"{instance}:{group_path}"
+ return (external_id, secret)
+ except KeyError:
+ logger.info("gitlab.webhook.missing-gitlab-token")
+ extra["reason"] = "The customer needs to set a Secret Token in their webhook."
+ logger.exception(extra["reason"])
+ return HttpResponse(status=400, reason=extra["reason"])
+ except ValueError:
+ logger.info("gitlab.webhook.malformed-gitlab-token", extra=extra)
+ extra["reason"] = "The customer's Secret Token is malformed."
+ logger.exception(extra["reason"])
+ return HttpResponse(status=400, reason=extra["reason"])
+ except Exception:
+ logger.info("gitlab.webhook.invalid-token", extra=extra)
+ extra["reason"] = "Generic catch-all error."
+ logger.exception(extra["reason"])
+ return HttpResponse(status=400, reason=extra["reason"])
+
+
+class GitlabWebhook(SCMWebhook, ABC):
@property
- @abstractmethod
- def event_type(self) -> IntegrationWebhookEventType:
- raise NotImplementedError
-
- @abstractmethod
- def __call__(
- self, integration: RpcIntegration, organization: RpcOrganization, event: Mapping[str, Any]
- ):
- raise NotImplementedError
+ def provider(self) -> str:
+ return "gitlab"
def get_repo(
self, integration: RpcIntegration, organization: RpcOrganization, event: Mapping[str, Any]
@@ -94,7 +116,7 @@ def update_repo_data(self, repo: Repository, event: Mapping[str, Any]):
)
-class MergeEventWebhook(Webhook):
+class MergeEventWebhook(GitlabWebhook):
"""
Handle Merge Request Hook
@@ -105,9 +127,13 @@ class MergeEventWebhook(Webhook):
def event_type(self) -> IntegrationWebhookEventType:
return IntegrationWebhookEventType.PULL_REQUEST
- def __call__(
- self, integration: RpcIntegration, organization: RpcOrganization, event: Mapping[str, Any]
- ):
+ def __call__(self, event: Mapping[str, Any], **kwargs):
+ if not (
+ (organization := kwargs.get("organization"))
+ and (integration := kwargs.get("integration"))
+ ):
+ raise ValueError("Organization and integration must be provided")
+
repo = self.get_repo(integration, organization, event)
if repo is None:
return
@@ -162,7 +188,7 @@ def __call__(
pass
-class PushEventWebhook(Webhook):
+class PushEventWebhook(GitlabWebhook):
"""
Handle push hook
@@ -173,9 +199,13 @@ class PushEventWebhook(Webhook):
def event_type(self) -> IntegrationWebhookEventType:
return IntegrationWebhookEventType.PUSH
- def __call__(
- self, integration: RpcIntegration, organization: RpcOrganization, event: Mapping[str, Any]
- ):
+ def __call__(self, event: Mapping[str, Any], **kwargs):
+ if not (
+ (organization := kwargs.get("organization"))
+ and (integration := kwargs.get("integration"))
+ ):
+ raise ValueError("Organization and integration must be provided")
+
repo = self.get_repo(integration, organization, event)
if repo is None:
return
@@ -222,37 +252,8 @@ def __call__(
pass
-class GitlabWebhookMixin:
- def _get_external_id(self, request, extra) -> tuple[str, str] | HttpResponse:
- token = ""
- try:
- # Munge the token to extract the integration external_id.
- # gitlab hook payloads don't give us enough unique context
- # to find data on our side so we embed one in the token.
- token = request.META["HTTP_X_GITLAB_TOKEN"]
- # e.g. "example.gitlab.com:group-x:webhook_secret_from_sentry_integration_table"
- instance, group_path, secret = token.split(":")
- external_id = f"{instance}:{group_path}"
- return (external_id, secret)
- except KeyError:
- logger.info("gitlab.webhook.missing-gitlab-token")
- extra["reason"] = "The customer needs to set a Secret Token in their webhook."
- logger.exception(extra["reason"])
- return HttpResponse(status=400, reason=extra["reason"])
- except ValueError:
- logger.info("gitlab.webhook.malformed-gitlab-token", extra=extra)
- extra["reason"] = "The customer's Secret Token is malformed."
- logger.exception(extra["reason"])
- return HttpResponse(status=400, reason=extra["reason"])
- except Exception:
- logger.info("gitlab.webhook.invalid-token", extra=extra)
- extra["reason"] = "Generic catch-all error."
- logger.exception(extra["reason"])
- return HttpResponse(status=400, reason=extra["reason"])
-
-
@region_silo_endpoint
-class GitlabWebhookEndpoint(Endpoint, GitlabWebhookMixin):
+class GitlabWebhookEndpoint(Endpoint):
owner = ApiOwner.INTEGRATIONS
publish_status = {
"POST": ApiPublishStatus.PRIVATE,
@@ -261,7 +262,7 @@ class GitlabWebhookEndpoint(Endpoint, GitlabWebhookMixin):
permission_classes = ()
provider = "gitlab"
- _handlers: dict[str, type[Webhook]] = {
+ _handlers: dict[str, type[GitlabWebhook]] = {
"Push Hook": PushEventWebhook,
"Merge Request Hook": MergeEventWebhook,
}
@@ -282,7 +283,7 @@ def post(self, request: HttpRequest) -> HttpResponse:
# AppPlatformEvents also hit this API
"event-type": request.META.get("HTTP_X_GITLAB_EVENT"),
}
- result = self._get_external_id(request=request, extra=extra)
+ result = get_gitlab_external_id(request=request, extra=extra)
if isinstance(result, HttpResponse):
return result
(external_id, secret) = result
@@ -351,8 +352,8 @@ def post(self, request: HttpRequest) -> HttpResponse:
with IntegrationWebhookEvent(
interaction_type=event_handler.event_type,
domain=IntegrationDomain.SOURCE_CODE_MANAGEMENT,
- provider_key="gitlab",
+ provider_key=event_handler.provider,
).capture():
- event_handler(integration, organization, event)
+ event_handler(event, integration=integration, organization=organization)
return HttpResponse(status=204)
diff --git a/src/sentry/integrations/jira/actions/form.py b/src/sentry/integrations/jira/actions/form.py
index 7c5d1f7acfe2b9..22013a1f9872d0 100644
--- a/src/sentry/integrations/jira/actions/form.py
+++ b/src/sentry/integrations/jira/actions/form.py
@@ -14,6 +14,8 @@ class JiraNotifyServiceForm(IntegrationNotifyServiceForm):
def clean(self) -> dict[str, Any] | None:
cleaned_data = super().clean()
+ if cleaned_data is None:
+ return None
integration_id = cleaned_data.get("integration")
integration = integration_service.get_integration(
diff --git a/src/sentry/integrations/messaging/metrics.py b/src/sentry/integrations/messaging/metrics.py
index d2e1da56bb72b0..82baec8eec3653 100644
--- a/src/sentry/integrations/messaging/metrics.py
+++ b/src/sentry/integrations/messaging/metrics.py
@@ -46,6 +46,9 @@ class MessagingInteractionType(StrEnum):
SEND_INCIDENT_ALERT_NOTIFICATION = "SEND_INCIDENT_ALERT_NOTIFICATION"
SEND_ISSUE_ALERT_NOTIFICATION = "SEND_ISSUE_ALERT_NOTIFICATION"
+ SEND_ACTIVITY_NOTIFICATION = "SEND_ACTIVITY_NOTIFICATION"
+ SEND_GENERIC_NOTIFICATION = "SEND_GENERIC_NOTIFICATION"
+
@dataclass
class MessagingInteractionEvent(IntegrationEventLifecycleMetric):
@@ -84,7 +87,6 @@ class MessageCommandHaltReason(StrEnum):
# Team Linking
LINK_FROM_CHANNEL = "link_from_channel"
LINK_USER_FIRST = "link_user_first"
- CHANNEL_ALREADY_LINKED = "channel_already_linked"
TEAM_NOT_LINKED = "team_not_linked"
INSUFFICIENT_ROLE = "insufficient_role"
diff --git a/src/sentry/integrations/middleware/hybrid_cloud/parser.py b/src/sentry/integrations/middleware/hybrid_cloud/parser.py
index b1740f1e250b87..e1226f7c55e020 100644
--- a/src/sentry/integrations/middleware/hybrid_cloud/parser.py
+++ b/src/sentry/integrations/middleware/hybrid_cloud/parser.py
@@ -12,6 +12,7 @@
from rest_framework import status
from sentry.api.base import ONE_DAY
+from sentry.constants import ObjectStatus
from sentry.hybridcloud.models.webhookpayload import WebhookPayload
from sentry.hybridcloud.outbox.category import WebhookProviderIdentifier
from sentry.hybridcloud.services.organization_mapping import organization_mapping_service
@@ -366,7 +367,8 @@ def get_organizations_from_integration(
logger.info("%s.no_integration", self.provider, extra={"path": self.request.path})
raise Integration.DoesNotExist()
organization_integrations = OrganizationIntegration.objects.filter(
- integration_id=integration.id
+ integration_id=integration.id,
+ status=ObjectStatus.ACTIVE,
)
if organization_integrations.count() == 0:
diff --git a/src/sentry/integrations/msteams/actions/notification.py b/src/sentry/integrations/msteams/actions/notification.py
index 7c3624401ae0df..982c3e7cde7801 100644
--- a/src/sentry/integrations/msteams/actions/notification.py
+++ b/src/sentry/integrations/msteams/actions/notification.py
@@ -2,12 +2,18 @@
from sentry import features
from sentry.eventstore.models import GroupEvent
+from sentry.integrations.messaging.metrics import (
+ MessagingInteractionEvent,
+ MessagingInteractionType,
+)
from sentry.integrations.msteams.actions.form import MsTeamsNotifyServiceForm
from sentry.integrations.msteams.card_builder.issues import MSTeamsIssueMessageBuilder
from sentry.integrations.msteams.client import MsTeamsClient
+from sentry.integrations.msteams.spec import MsTeamsMessagingSpec
from sentry.integrations.msteams.utils import get_channel_id
from sentry.integrations.services.integration import RpcIntegration
from sentry.rules.actions import IntegrationEventAction
+from sentry.shared_integrations.exceptions import ApiError
from sentry.utils import metrics
@@ -54,7 +60,15 @@ def send_notification(event, futures):
).build_group_card(notification_uuid=notification_uuid)
client = MsTeamsClient(integration)
- client.send_card(channel, card)
+ with MessagingInteractionEvent(
+ interaction_type=MessagingInteractionType.SEND_ISSUE_ALERT_NOTIFICATION,
+ spec=MsTeamsMessagingSpec(),
+ ).capture() as lifecycle:
+ lifecycle.add_extras({"integration_id": integration.id, "channel": channel})
+ try:
+ client.send_card(channel, card)
+ except ApiError as e:
+ lifecycle.record_failure(e)
rule = rules[0] if rules else None
self.record_notification_sent(event, channel, rule, notification_uuid)
diff --git a/src/sentry/integrations/pagerduty/actions/notification.py b/src/sentry/integrations/pagerduty/actions/notification.py
index 3cff720b0e41dd..54e48290894ae2 100644
--- a/src/sentry/integrations/pagerduty/actions/notification.py
+++ b/src/sentry/integrations/pagerduty/actions/notification.py
@@ -6,7 +6,6 @@
import sentry_sdk
-from sentry import features
from sentry.integrations.pagerduty.actions import PagerDutyNotifyServiceForm
from sentry.integrations.pagerduty.client import PAGERDUTY_DEFAULT_SEVERITY, PagerdutySeverity
from sentry.rules.actions import IntegrationEventAction
@@ -18,17 +17,13 @@
class PagerDutyNotifyServiceAction(IntegrationEventAction):
id = "sentry.integrations.pagerduty.notify_action.PagerDutyNotifyServiceAction"
form_cls = PagerDutyNotifyServiceForm
- old_label = "Send a notification to PagerDuty account {account} and service {service}"
- new_label = "Send a notification to PagerDuty account {account} and service {service} with {severity} severity"
+ label = "Send a notification to PagerDuty account {account} and service {service} with {severity} severity"
prompt = "Send a PagerDuty notification"
provider = "pagerduty"
integration_key = "account"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self.has_feature_flag = features.has(
- "organizations:integrations-custom-alert-priorities", self.project.organization
- )
self.form_fields = {
"account": {
"type": "choice",
@@ -46,7 +41,6 @@ def __init__(self, *args, **kwargs):
],
},
}
- self.__class__.label = self.new_label if self.has_feature_flag else self.old_label
def _get_service(self):
oi = self.get_organization_integration()
diff --git a/src/sentry/integrations/pipeline.py b/src/sentry/integrations/pipeline.py
index 38d15d5864bb52..734210c82606de 100644
--- a/src/sentry/integrations/pipeline.py
+++ b/src/sentry/integrations/pipeline.py
@@ -15,6 +15,7 @@
from sentry.integrations.models.organization_integration import OrganizationIntegration
from sentry.models.organizationmapping import OrganizationMapping
from sentry.organizations.absolute_url import generate_organization_url
+from sentry.organizations.services.organization import organization_service
from sentry.pipeline import Pipeline, PipelineAnalyticsEntry
from sentry.shared_integrations.exceptions import IntegrationError, IntegrationProviderError
from sentry.silo.base import SiloMode
@@ -94,6 +95,29 @@ def initialize(self) -> None:
)
def finish_pipeline(self):
+ org_context = organization_service.get_organization_by_id(
+ id=self.organization.id, user_id=self.request.user.id
+ )
+
+ if (
+ org_context
+ and org_context.member
+ and "org:integrations" not in org_context.member.scopes
+ ):
+ error_message = (
+ "You must be an organization owner, manager or admin to install this integration."
+ )
+ logger.info(
+ "build-integration.permission_error",
+ extra={
+ "error_message": error_message,
+ "organization_id": self.organization.id,
+ "user_id": self.request.user.id,
+ "provider_key": self.provider.key,
+ },
+ )
+ return self.error(error_message)
+
try:
data = self.provider.build_integration(self.state.data)
except IntegrationError as e:
diff --git a/src/sentry/integrations/project_management/metrics.py b/src/sentry/integrations/project_management/metrics.py
index 782bb9c2f4021d..09c113140409c4 100644
--- a/src/sentry/integrations/project_management/metrics.py
+++ b/src/sentry/integrations/project_management/metrics.py
@@ -15,9 +15,7 @@ class ProjectManagementActionType(StrEnum):
OUTBOUND_STATUS_SYNC = "outbound_status_sync"
INBOUND_STATUS_SYNC = "inbound_status_sync"
LINK_EXTERNAL_ISSUE = "link_external_issue"
-
- def __str__(self):
- return self.value.lower()
+ CREATE_EXTERNAL_ISSUE_VIA_ISSUE_DETAIL = "create_external_issue_via_issue_detail"
class ProjectManagementHaltReason(StrEnum):
diff --git a/src/sentry/integrations/services/integration/impl.py b/src/sentry/integrations/services/integration/impl.py
index cbf01a3c334649..e4a75ee868cbd3 100644
--- a/src/sentry/integrations/services/integration/impl.py
+++ b/src/sentry/integrations/services/integration/impl.py
@@ -12,11 +12,16 @@
from sentry.constants import SentryAppInstallationStatus
from sentry.hybridcloud.rpc.pagination import RpcPaginationArgs, RpcPaginationResult
from sentry.incidents.models.incident import INCIDENT_STATUS, IncidentStatus
+from sentry.integrations.messaging.metrics import (
+ MessagingInteractionEvent,
+ MessagingInteractionType,
+)
from sentry.integrations.mixins import NotifyBasicMixin
from sentry.integrations.models.integration import Integration
from sentry.integrations.models.integration_external_project import IntegrationExternalProject
from sentry.integrations.models.organization_integration import OrganizationIntegration
from sentry.integrations.msteams import MsTeamsClient
+from sentry.integrations.msteams.spec import MsTeamsMessagingSpec
from sentry.integrations.services.integration import (
IntegrationService,
RpcIntegration,
@@ -37,7 +42,6 @@
from sentry.sentry_apps.api.serializers.app_platform_event import AppPlatformEvent
from sentry.sentry_apps.models.sentry_app import SentryApp
from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
-from sentry.shared_integrations.exceptions import ApiError
from sentry.utils import json, metrics
from sentry.utils.sentry_apps import send_and_save_webhook_request
@@ -447,12 +451,20 @@ def send_msteams_incident_alert_notification(
) -> bool:
integration = Integration.objects.get(id=integration_id)
client = MsTeamsClient(integration)
- try:
- client.send_card(channel, attachment)
- return True
- except ApiError:
- logger.info("rule.fail.msteams_post", exc_info=True)
- return False
+
+ with MessagingInteractionEvent(
+ interaction_type=MessagingInteractionType.SEND_INCIDENT_ALERT_NOTIFICATION,
+ spec=MsTeamsMessagingSpec(),
+ ).capture() as lifecycle:
+ try:
+ client.send_card(channel, attachment)
+ return True
+ except Exception as e:
+ # TODO(iamrajjoshi): Remove the logger after we audit lifecycle
+ logger.info("rule.fail.msteams_post", exc_info=True)
+ lifecycle.add_extras({"integration_id": integration_id, "channel": channel})
+ lifecycle.record_failure(e)
+ return False
def delete_integration(self, *, integration_id: int) -> None:
try:
diff --git a/src/sentry/integrations/slack/actions/notification.py b/src/sentry/integrations/slack/actions/notification.py
index da4ba68e335e39..9854b42a8f56d1 100644
--- a/src/sentry/integrations/slack/actions/notification.py
+++ b/src/sentry/integrations/slack/actions/notification.py
@@ -68,10 +68,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
"channel": {"type": "string", "placeholder": "e.g., #critical, Jane Schmidt"},
"channel_id": {"type": "string", "placeholder": "e.g., CA2FRA079 or UA1J9RTE1"},
"tags": {"type": "string", "placeholder": "e.g., environment,user,my_tag"},
- }
- self.form_fields["notes"] = {
- "type": "string",
- "placeholder": "e.g. @jane, @on-call-team",
+ "notes": {"type": "string", "placeholder": "e.g., @jane, @on-call-team"},
}
self._repository: IssueAlertNotificationMessageRepository = (
diff --git a/src/sentry/integrations/slack/message_builder/base/base.py b/src/sentry/integrations/slack/message_builder/base/base.py
index a91f1c473c8495..bc3f756bf2399c 100644
--- a/src/sentry/integrations/slack/message_builder/base/base.py
+++ b/src/sentry/integrations/slack/message_builder/base/base.py
@@ -40,10 +40,3 @@ def build_fallback_text(self, obj: Group | Event | GroupEvent, project_slug: str
title = obj.occurrence.issue_title
return f"[{project_slug}] {title}"
-
- @property
- def escape_text(self) -> bool:
- """
- Returns True if we need to escape the text in the message.
- """
- return False
diff --git a/src/sentry/integrations/slack/message_builder/issues.py b/src/sentry/integrations/slack/message_builder/issues.py
index 95eaee8aa32224..c64a37013de558 100644
--- a/src/sentry/integrations/slack/message_builder/issues.py
+++ b/src/sentry/integrations/slack/message_builder/issues.py
@@ -36,12 +36,7 @@
from sentry.integrations.time_utils import get_approx_start_time, time_since
from sentry.integrations.types import ExternalProviders
from sentry.issues.endpoints.group_details import get_group_global_count
-from sentry.issues.grouptype import (
- GroupCategory,
- NotificationContextField,
- PerformanceP95EndpointRegressionGroupType,
- ProfileFunctionRegressionType,
-)
+from sentry.issues.grouptype import GroupCategory, NotificationContextField
from sentry.models.commit import Commit
from sentry.models.group import Group, GroupStatus
from sentry.models.project import Project
@@ -107,11 +102,6 @@ def get_group_users_count(group: Group, rules: list[Rule] | None = None) -> int:
}
-REGRESSION_PERFORMANCE_ISSUE_TYPES = [
- PerformanceP95EndpointRegressionGroupType,
- ProfileFunctionRegressionType,
-]
-
logger = logging.getLogger(__name__)
@@ -456,13 +446,6 @@ def __init__(
self.skip_fallback = skip_fallback
self.notes = notes
- @property
- def escape_text(self) -> bool:
- """
- Returns True if we need to escape the text in the message.
- """
- return True
-
def get_title_block(
self,
event_or_group: Event | GroupEvent | Group,
diff --git a/src/sentry/integrations/slack/metrics.py b/src/sentry/integrations/slack/metrics.py
index 18a6364c7877c3..fe5c52da59e973 100644
--- a/src/sentry/integrations/slack/metrics.py
+++ b/src/sentry/integrations/slack/metrics.py
@@ -1,5 +1,13 @@
# metrics constants
+from slack_sdk.errors import SlackApiError
+
+from sentry.integrations.slack.utils.errors import (
+ SLACK_SDK_HALT_ERROR_CATEGORIES,
+ unpack_slack_api_error,
+)
+from sentry.integrations.utils.metrics import EventLifecycle
+
SLACK_ISSUE_ALERT_SUCCESS_DATADOG_METRIC = "sentry.integrations.slack.issue_alert.success"
SLACK_ISSUE_ALERT_FAILURE_DATADOG_METRIC = "sentry.integrations.slack.issue_alert.failure"
SLACK_ACTIVITY_THREAD_SUCCESS_DATADOG_METRIC = "sentry.integrations.slack.activity_thread.success"
@@ -9,28 +17,6 @@
SLACK_NOTIFY_RECIPIENT_SUCCESS_DATADOG_METRIC = "sentry.integrations.slack.notify_recipient.success"
SLACK_NOTIFY_RECIPIENT_FAILURE_DATADOG_METRIC = "sentry.integrations.slack.notify_recipient.failure"
-# Bot commands
-SLACK_BOT_COMMAND_LINK_IDENTITY_SUCCESS_DATADOG_METRIC = (
- "sentry.integrations.slack.link_identity_view.success"
-)
-SLACK_BOT_COMMAND_LINK_IDENTITY_FAILURE_DATADOG_METRIC = (
- "sentry.integrations.slack.link_identity_view.failure"
-)
-SLACK_BOT_COMMAND_UNLINK_IDENTITY_SUCCESS_DATADOG_METRIC = (
- "sentry.integrations.slack.unlink_identity_view.success"
-)
-SLACK_BOT_COMMAND_UNLINK_IDENTITY_FAILURE_DATADOG_METRIC = (
- "sentry.integrations.slack.unlink_identity_view.failure"
-)
-SLACK_BOT_COMMAND_UNLINK_TEAM_SUCCESS_DATADOG_METRIC = (
- "sentry.integrations.slack.unlink_team.success"
-)
-SLACK_BOT_COMMAND_UNLINK_TEAM_FAILURE_DATADOG_METRIC = (
- "sentry.integrations.slack.unlink_team.failure"
-)
-SLACK_BOT_COMMAND_LINK_TEAM_SUCCESS_DATADOG_METRIC = "sentry.integrations.slack.link_team.success"
-SLACK_BOT_COMMAND_LINK_TEAM_FAILURE_DATADOG_METRIC = "sentry.integrations.slack.link_team.failure"
-
# Webhooks
SLACK_WEBHOOK_DM_ENDPOINT_SUCCESS_DATADOG_METRIC = "sentry.integrations.slack.dm_endpoint.success"
SLACK_WEBHOOK_DM_ENDPOINT_FAILURE_DATADOG_METRIC = "sentry.integrations.slack.dm_endpoint.failure"
@@ -79,3 +65,14 @@
# Middleware Parsers
SLACK_MIDDLE_PARSERS_SUCCESS_DATADOG_METRIC = "sentry.middleware.integrations.slack.parsers.success"
SLACK_MIDDLE_PARSERS_FAILURE_DATADOG_METRIC = "sentry.middleware.integrations.slack.parsers.failure"
+
+
+def record_lifecycle_termination_level(lifecycle: EventLifecycle, error: SlackApiError) -> None:
+ if (
+ (reason := unpack_slack_api_error(error))
+ and reason is not None
+ and reason in SLACK_SDK_HALT_ERROR_CATEGORIES
+ ):
+ lifecycle.record_halt(reason.message)
+ else:
+ lifecycle.record_failure(error)
diff --git a/src/sentry/integrations/slack/requests/base.py b/src/sentry/integrations/slack/requests/base.py
index f546bd9ff2f253..2b7bf34ea9f2b8 100644
--- a/src/sentry/integrations/slack/requests/base.py
+++ b/src/sentry/integrations/slack/requests/base.py
@@ -73,7 +73,6 @@ def validate(self) -> None:
"""
Ensure everything is present to properly process this request
"""
- self.request.body
self._log_request()
self._get_context()
self.authorize()
diff --git a/src/sentry/integrations/slack/service.py b/src/sentry/integrations/slack/service.py
index 9b2dd314ca53f2..df0511ab1510ab 100644
--- a/src/sentry/integrations/slack/service.py
+++ b/src/sentry/integrations/slack/service.py
@@ -10,6 +10,10 @@
from slack_sdk.errors import SlackApiError
from sentry.constants import ISSUE_ALERTS_THREAD_DEFAULT
+from sentry.integrations.messaging.metrics import (
+ MessagingInteractionEvent,
+ MessagingInteractionType,
+)
from sentry.integrations.models.integration import Integration
from sentry.integrations.notifications import get_context
from sentry.integrations.repository import get_default_issue_alert_repository
@@ -25,8 +29,10 @@
SLACK_ACTIVITY_THREAD_SUCCESS_DATADOG_METRIC,
SLACK_NOTIFY_RECIPIENT_FAILURE_DATADOG_METRIC,
SLACK_NOTIFY_RECIPIENT_SUCCESS_DATADOG_METRIC,
+ record_lifecycle_termination_level,
)
from sentry.integrations.slack.sdk_client import SlackSdkClient
+from sentry.integrations.slack.spec import SlackMessagingSpec
from sentry.integrations.slack.threads.activity_notifications import (
AssignedActivityNotification,
ExternalIssueCreatedActivityNotification,
@@ -182,12 +188,23 @@ def notify_all_threads_for_activity(self, activity: Activity) -> None:
slack_client = SlackSdkClient(integration_id=integration.id)
# Get all parent notifications, which will have the message identifier to use to reply in a thread
- parent_notifications = (
- self._notification_message_repository.get_all_parent_notification_messages_by_filters(
+ with MessagingInteractionEvent(
+ interaction_type=MessagingInteractionType.GET_PARENT_NOTIFICATION,
+ spec=SlackMessagingSpec(),
+ ).capture() as lifecycle:
+ lifecycle.add_extras(
+ {
+ "activity_id": activity.id,
+ "group_id": activity.group.id,
+ "project_id": activity.project.id,
+ }
+ )
+ parent_notifications = self._notification_message_repository.get_all_parent_notification_messages_by_filters(
group_ids=[activity.group.id],
project_ids=[activity.project.id],
)
- )
+
+ # We don't wrap this in a lifecycle because _handle_parent_notification is already wrapped in a lifecycle
for parent_notification in parent_notifications:
try:
self._handle_parent_notification(
@@ -196,6 +213,7 @@ def notify_all_threads_for_activity(self, activity: Activity) -> None:
client=slack_client,
)
except Exception as err:
+ # TODO(iamrajjoshi): We can probably swallow this error once we audit the lifecycle
self._logger.info(
"failed to send notification",
exc_info=err,
@@ -254,25 +272,33 @@ def _handle_parent_notification(
"rule_action_uuid": parent_notification.rule_action_uuid,
}
- try:
- client.chat_postMessage(
- channel=channel_id,
- thread_ts=parent_notification.message_identifier,
- text=notification_to_send,
- blocks=json_blocks,
- )
- metrics.incr(SLACK_ACTIVITY_THREAD_SUCCESS_DATADOG_METRIC, sample_rate=1.0)
- except SlackApiError as e:
- self._logger.info(
- "failed to post message to slack",
- extra={"error": str(e), "blocks": json_blocks, **extra},
- )
- metrics.incr(
- SLACK_ACTIVITY_THREAD_FAILURE_DATADOG_METRIC,
- sample_rate=1.0,
- tags={"ok": e.response.get("ok", False), "status": e.response.status_code},
- )
- raise
+ with MessagingInteractionEvent(
+ interaction_type=MessagingInteractionType.SEND_ACTIVITY_NOTIFICATION,
+ spec=SlackMessagingSpec(),
+ ).capture() as lifecycle:
+ try:
+ client.chat_postMessage(
+ channel=channel_id,
+ thread_ts=parent_notification.message_identifier,
+ text=notification_to_send,
+ blocks=json_blocks,
+ )
+ # TODO(iamrajjoshi): Remove this after we validate lifecycle
+ metrics.incr(SLACK_ACTIVITY_THREAD_SUCCESS_DATADOG_METRIC, sample_rate=1.0)
+ except SlackApiError as e:
+ # TODO(iamrajjoshi): Remove this after we validate lifecycle
+ self._logger.info(
+ "failed to post message to slack",
+ extra={"error": str(e), "blocks": json_blocks, **extra},
+ )
+ metrics.incr(
+ SLACK_ACTIVITY_THREAD_FAILURE_DATADOG_METRIC,
+ sample_rate=1.0,
+ tags={"ok": e.response.get("ok", False), "status": e.response.status_code},
+ )
+ lifecycle.add_extras({"rule_action_uuid": parent_notification.rule_action_uuid})
+ record_lifecycle_termination_level(lifecycle, e)
+ raise
def _get_notification_message_to_send(self, activity: Activity) -> str | None:
"""
@@ -427,21 +453,32 @@ def send_message_to_slack_channel(
"""Execution of send_notification_as_slack."""
client = SlackSdkClient(integration_id=integration_id)
- try:
- client.chat_postMessage(
- blocks=str(payload.get("blocks", "")),
- text=str(payload.get("text", "")),
- channel=str(payload.get("channel", "")),
- unfurl_links=False,
- unfurl_media=False,
- callback_id=str(payload.get("callback_id", "")),
- )
- metrics.incr(SLACK_NOTIFY_RECIPIENT_SUCCESS_DATADOG_METRIC, sample_rate=1.0)
- except SlackApiError as e:
- extra = {"error": str(e), **log_params}
- self._logger.info(log_error_message, extra=extra)
- metrics.incr(
- SLACK_NOTIFY_RECIPIENT_FAILURE_DATADOG_METRIC,
- sample_rate=1.0,
- tags={"ok": e.response.get("ok", False), "status": e.response.status_code},
- )
+ with MessagingInteractionEvent(
+ interaction_type=MessagingInteractionType.SEND_GENERIC_NOTIFICATION,
+ spec=SlackMessagingSpec(),
+ ).capture() as lifecycle:
+ try:
+ lifecycle.add_extras({"integration_id": integration_id})
+ client.chat_postMessage(
+ blocks=str(payload.get("blocks", "")),
+ text=str(payload.get("text", "")),
+ channel=str(payload.get("channel", "")),
+ unfurl_links=False,
+ unfurl_media=False,
+ callback_id=str(payload.get("callback_id", "")),
+ )
+ # TODO(iamrajjoshi): Remove this after we validate lifecycle
+ metrics.incr(SLACK_NOTIFY_RECIPIENT_SUCCESS_DATADOG_METRIC, sample_rate=1.0)
+ except SlackApiError as e:
+ # TODO(iamrajjoshi): Remove this after we validate lifecycle
+ extra = {"error": str(e), **log_params}
+ self._logger.info(log_error_message, extra=extra)
+ metrics.incr(
+ SLACK_NOTIFY_RECIPIENT_FAILURE_DATADOG_METRIC,
+ sample_rate=1.0,
+ tags={"ok": e.response.get("ok", False), "status": e.response.status_code},
+ )
+ lifecycle.add_extras(
+ {k: str(v) for k, v in log_params.items() if isinstance(v, (int, str))}
+ )
+ record_lifecycle_termination_level(lifecycle, e)
diff --git a/src/sentry/integrations/slack/utils/notifications.py b/src/sentry/integrations/slack/utils/notifications.py
index 32a2edebc34058..3923bd21c19632 100644
--- a/src/sentry/integrations/slack/utils/notifications.py
+++ b/src/sentry/integrations/slack/utils/notifications.py
@@ -31,13 +31,10 @@
SLACK_LINK_IDENTITY_MSG_SUCCESS_DATADOG_METRIC,
SLACK_METRIC_ALERT_FAILURE_DATADOG_METRIC,
SLACK_METRIC_ALERT_SUCCESS_DATADOG_METRIC,
+ record_lifecycle_termination_level,
)
from sentry.integrations.slack.sdk_client import SlackSdkClient
from sentry.integrations.slack.spec import SlackMessagingSpec
-from sentry.integrations.slack.utils.errors import (
- SLACK_SDK_HALT_ERROR_CATEGORIES,
- unpack_slack_api_error,
-)
from sentry.models.options.organization_option import OrganizationOption
from sentry.utils import metrics
@@ -176,14 +173,7 @@ def send_incident_alert_notification(
lifecycle.add_extras(log_params)
# If the error is a channel not found or archived, we can halt the flow
# This means that the channel was deleted or archived after the alert rule was created
- if (
- (reason := unpack_slack_api_error(e))
- and reason is not None
- and reason in SLACK_SDK_HALT_ERROR_CATEGORIES
- ):
- lifecycle.record_halt(reason.message)
- else:
- lifecycle.record_failure(e)
+ record_lifecycle_termination_level(lifecycle, e)
else:
success = True
diff --git a/src/sentry/integrations/slack/views/__init__.py b/src/sentry/integrations/slack/views/__init__.py
index 9e0747e9003b25..bd7c1aa6ea9c5c 100644
--- a/src/sentry/integrations/slack/views/__init__.py
+++ b/src/sentry/integrations/slack/views/__init__.py
@@ -2,23 +2,15 @@
from django.http import HttpRequest, HttpResponse
from django.urls import reverse
-from django.views.decorators.cache import never_cache as django_never_cache
from rest_framework.request import Request
from sentry.utils.http import absolute_uri
from sentry.utils.signing import sign
-from sentry.web.decorators import EndpointFunc
from sentry.web.helpers import render_to_response
SALT = "sentry-slack-integration"
-def never_cache(view_func: EndpointFunc) -> EndpointFunc:
- """TODO(mgaeta): Remove cast once Django has a typed version."""
- result: EndpointFunc = django_never_cache(view_func)
- return result
-
-
def build_linking_url(endpoint: str, **kwargs: Any) -> str:
"""TODO(mgaeta): Remove cast once sentry/utils/http.py is typed."""
url: str = absolute_uri(reverse(endpoint, kwargs={"signed_params": sign(salt=SALT, **kwargs)}), options.get("slack.url-prefix"))
diff --git a/src/sentry/integrations/slack/webhooks/action.py b/src/sentry/integrations/slack/webhooks/action.py
index a4f74148431940..2e595016bee4c7 100644
--- a/src/sentry/integrations/slack/webhooks/action.py
+++ b/src/sentry/integrations/slack/webhooks/action.py
@@ -118,15 +118,7 @@ def update_group(
status_code=403, body="The user does not have access to the organization."
)
- return update_groups(
- request=request,
- group_ids=[group.id],
- projects=[group.project],
- organization_id=group.organization.id,
- search_fn=None,
- user=user,
- data=data,
- )
+ return update_groups(request=request, groups=[group], user=user, data=data)
def get_rule(slack_request: SlackActionRequest) -> Rule | None:
@@ -414,6 +406,7 @@ def _handle_group_actions(
)
view = View(**slack_request.data["view"])
+ assert view.private_metadata is not None
private_metadata = orjson.loads(view.private_metadata)
original_tags_from_request = set(private_metadata.get("tags", {}))
diff --git a/src/sentry/integrations/slack/webhooks/base.py b/src/sentry/integrations/slack/webhooks/base.py
index fba29ed49d3b62..0699aa9e3aab93 100644
--- a/src/sentry/integrations/slack/webhooks/base.py
+++ b/src/sentry/integrations/slack/webhooks/base.py
@@ -136,7 +136,6 @@ class SlackCommandDispatcher(MessagingIntegrationCommandDispatcher[Response]):
@property
def TEAM_HALT_MAPPINGS(self) -> dict[str, MessageCommandHaltReason]:
from sentry.integrations.slack.webhooks.command import (
- CHANNEL_ALREADY_LINKED_MESSAGE,
INSUFFICIENT_ROLE_MESSAGE,
LINK_FROM_CHANNEL_MESSAGE,
LINK_USER_FIRST_MESSAGE,
@@ -147,7 +146,6 @@ def TEAM_HALT_MAPPINGS(self) -> dict[str, MessageCommandHaltReason]:
LINK_FROM_CHANNEL_MESSAGE: MessageCommandHaltReason.LINK_FROM_CHANNEL,
LINK_USER_FIRST_MESSAGE: MessageCommandHaltReason.LINK_USER_FIRST,
INSUFFICIENT_ROLE_MESSAGE: MessageCommandHaltReason.INSUFFICIENT_ROLE,
- CHANNEL_ALREADY_LINKED_MESSAGE: MessageCommandHaltReason.CHANNEL_ALREADY_LINKED,
TEAM_NOT_LINKED_MESSAGE: MessageCommandHaltReason.TEAM_NOT_LINKED,
}
@@ -200,7 +198,7 @@ def link_team_handler(self, input: CommandInput) -> IntegrationResponse[Response
for message, reason in self.TEAM_HALT_MAPPINGS.items():
if message in str(response.data):
return IntegrationResponse(
- interaction_result=EventLifecycleOutcome.SUCCESS,
+ interaction_result=EventLifecycleOutcome.HALTED,
response=response,
outcome_reason=str(reason),
)
@@ -215,7 +213,7 @@ def unlink_team_handler(self, input: CommandInput) -> IntegrationResponse[Respon
for message, reason in self.TEAM_HALT_MAPPINGS.items():
if message in str(response.data):
return IntegrationResponse(
- interaction_result=EventLifecycleOutcome.SUCCESS,
+ interaction_result=EventLifecycleOutcome.HALTED,
response=response,
outcome_reason=str(reason),
)
diff --git a/src/sentry/integrations/slack/webhooks/command.py b/src/sentry/integrations/slack/webhooks/command.py
index ea0f111566da4d..575d983f72dd7e 100644
--- a/src/sentry/integrations/slack/webhooks/command.py
+++ b/src/sentry/integrations/slack/webhooks/command.py
@@ -6,7 +6,6 @@
from rest_framework.request import Request
from rest_framework.response import Response
-from sentry import features
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
@@ -27,7 +26,7 @@
from sentry.models.organizationmember import OrganizationMember
from sentry.utils import metrics
-_logger = logging.getLogger(__name__)
+_logger = logging.getLogger("sentry.integration.slack.bot-commands")
from .base import SlackDMEndpoint
@@ -35,7 +34,6 @@
"Link your Sentry team to this Slack channel! <{associate_url}|Link your team now> to receive "
"notifications of issues in Sentry in Slack."
)
-CHANNEL_ALREADY_LINKED_MESSAGE = "This channel already has a team linked to it."
LINK_USER_FIRST_MESSAGE = (
"You must first link your identity to Sentry by typing /sentry link. Be aware that you "
"must be an admin or higher in your Sentry organization or a team admin to link your team."
@@ -86,9 +84,7 @@ def link_team(self, slack_request: SlackDMRequest) -> Response:
if slack_request.channel_name == DIRECT_MESSAGE_CHANNEL_NAME:
return self.reply(slack_request, LINK_FROM_CHANNEL_MESSAGE)
- logger_params = {
- "slack_request": slack_request,
- }
+ logger_params = {}
identity_user = slack_request.get_identity_user()
if not identity_user:
@@ -103,17 +99,11 @@ def link_team(self, slack_request: SlackDMRequest) -> Response:
has_valid_role = False
for organization_membership in organization_memberships:
- if not features.has(
- "organizations:slack-multiple-team-single-channel-linking",
- organization_membership.organization,
- ) and is_team_linked_to_channel(organization_membership.organization, slack_request):
- return self.reply(slack_request, CHANNEL_ALREADY_LINKED_MESSAGE)
-
if is_valid_role(organization_membership) or is_team_admin(organization_membership):
has_valid_role = True
if not has_valid_role:
- _logger.info("insufficient-role", extra=logger_params)
+ _logger.error("insufficient-role", extra=logger_params)
metrics.incr(
self._METRICS_FAILURE_KEY + ".link_team.insufficient_role", sample_rate=1.0
)
diff --git a/src/sentry/integrations/source_code_management/repository.py b/src/sentry/integrations/source_code_management/repository.py
index bf737cdfda0062..3fbb2f746342d9 100644
--- a/src/sentry/integrations/source_code_management/repository.py
+++ b/src/sentry/integrations/source_code_management/repository.py
@@ -153,7 +153,17 @@ def get_stacktrace_link(
If no file was found return `None`, and re-raise for non-"Not Found"
errors, like 403 "Account Suspended".
"""
- with self.record_event(SCMIntegrationInteractionType.GET_STACKTRACE_LINK).capture():
+ with self.record_event(
+ SCMIntegrationInteractionType.GET_STACKTRACE_LINK
+ ).capture() as lifecycle:
+ lifecycle.add_extras(
+ {
+ "filepath": filepath,
+ "default": default,
+ "version": version,
+ "organization_id": repo.organization_id,
+ }
+ )
scope = sentry_sdk.Scope.get_isolation_scope()
scope.set_tag("stacktrace_link.tried_version", False)
if version:
@@ -182,7 +192,15 @@ def get_codeowner_file(
* filepath - full path of the file i.e. CODEOWNERS, .github/CODEOWNERS, docs/CODEOWNERS
* raw - the decoded raw contents of the codeowner file
"""
- with self.record_event(SCMIntegrationInteractionType.GET_CODEOWNER_FILE).capture():
+ with self.record_event(
+ SCMIntegrationInteractionType.GET_CODEOWNER_FILE
+ ).capture() as lifecycle:
+ lifecycle.add_extras(
+ {
+ "ref": ref,
+ "organization_id": repo.organization_id,
+ }
+ )
if self.codeowners_locations is None:
raise NotImplementedError("Implement self.codeowners_locations to use this method.")
diff --git a/src/sentry/integrations/source_code_management/webhook.py b/src/sentry/integrations/source_code_management/webhook.py
new file mode 100644
index 00000000000000..bc0eef1ae39cf9
--- /dev/null
+++ b/src/sentry/integrations/source_code_management/webhook.py
@@ -0,0 +1,26 @@
+from abc import ABC, abstractmethod
+from collections.abc import Mapping
+from typing import Any
+
+from sentry.integrations.utils.metrics import IntegrationWebhookEventType
+from sentry.models.repository import Repository
+
+
+class SCMWebhook(ABC):
+ @property
+ @abstractmethod
+ def provider(self) -> str:
+ raise NotImplementedError
+
+ @property
+ @abstractmethod
+ def event_type(self) -> IntegrationWebhookEventType:
+ raise NotImplementedError
+
+ @abstractmethod
+ def __call__(self, event: Mapping[str, Any], **kwargs) -> None:
+ raise NotImplementedError
+
+ @abstractmethod
+ def update_repo_data(self, repo: Repository, event: Mapping[str, Any]) -> None:
+ raise NotImplementedError
diff --git a/src/sentry/integrations/tasks/sync_status_inbound.py b/src/sentry/integrations/tasks/sync_status_inbound.py
index 7337a657ced6b1..d60829e5e4234e 100644
--- a/src/sentry/integrations/tasks/sync_status_inbound.py
+++ b/src/sentry/integrations/tasks/sync_status_inbound.py
@@ -1,5 +1,6 @@
import logging
from collections.abc import Iterable, Mapping
+from datetime import timedelta
from typing import Any
from django.db.models import Q
@@ -168,6 +169,20 @@ def get_resolutions_and_activity_data_for_groups(
return resolutions_by_group_id, activity_type, activity_data
+def group_was_recently_resolved(group: Group) -> bool:
+ """
+ Check if the group was resolved in the last 3 minutes
+ """
+ if group.status != GroupStatus.RESOLVED:
+ return False
+
+ try:
+ group_resolution = GroupResolution.objects.get(group=group)
+ return group_resolution.datetime > django_timezone.now() - timedelta(minutes=3)
+ except GroupResolution.DoesNotExist:
+ return False
+
+
@instrumented_task(
name="sentry.integrations.tasks.sync_status_inbound",
queue="integrations",
@@ -189,8 +204,8 @@ def sync_status_inbound(
raise Integration.DoesNotExist
organizations = Organization.objects.filter(id=organization_id)
- affected_groups = Group.objects.get_groups_by_external_issue(
- integration, organizations, issue_key
+ affected_groups = list(
+ Group.objects.get_groups_by_external_issue(integration, organizations, issue_key)
)
if not affected_groups:
return
@@ -213,6 +228,17 @@ def sync_status_inbound(
"integration_id": integration_id,
}
if action == ResolveSyncAction.RESOLVE:
+ # Check if the group was recently resolved and we should skip the request
+ # Avoid resolving the group in-app and then re-resolving via the integration webhook
+ # which would override the in-app resolution
+ resolvable_groups = []
+ for group in affected_groups:
+ if not group_was_recently_resolved(group):
+ resolvable_groups.append(group)
+
+ if not resolvable_groups:
+ return
+
(
resolutions_by_group_id,
activity_type,
@@ -221,14 +247,14 @@ def sync_status_inbound(
affected_groups, config.get("resolution_strategy"), activity_data, organization_id
)
Group.objects.update_group_status(
- groups=affected_groups,
+ groups=resolvable_groups,
status=GroupStatus.RESOLVED,
substatus=None,
activity_type=activity_type,
activity_data=activity_data,
)
        # after we update the group, update the resolutions
- for group in affected_groups:
+ for group in resolvable_groups:
resolution_params = resolutions_by_group_id.get(group.id)
if resolution_params:
resolution, created = GroupResolution.objects.get_or_create(
diff --git a/src/sentry/integrations/utils/sync.py b/src/sentry/integrations/utils/sync.py
index 86bb58330748ae..4c15633a978081 100644
--- a/src/sentry/integrations/utils/sync.py
+++ b/src/sentry/integrations/utils/sync.py
@@ -102,6 +102,7 @@ def sync_group_assignee_inbound(
if not assign:
for group in affected_groups:
+ # XXX: Pass an acting user and make the acting_user mandatory
GroupAssignee.objects.deassign(
group,
assignment_source=AssignmentSource.from_integration(integration),
diff --git a/src/sentry/interfaces/security.py b/src/sentry/interfaces/security.py
index 2ca01d005cdc80..59ba1d6c4f5aba 100644
--- a/src/sentry/interfaces/security.py
+++ b/src/sentry/interfaces/security.py
@@ -150,7 +150,7 @@ class Csp(SecurityReport):
"""
A CSP violation report.
- See also: http://www.w3.org/TR/CSP/#violation-reports
+ See also: https://www.w3.org/TR/CSP/#violation-events
>>> {
>>> "document_uri": "http://example.com/",
diff --git a/src/sentry/issues/attributes.py b/src/sentry/issues/attributes.py
index 26d730a418ecb9..2c8f1e842c8201 100644
--- a/src/sentry/issues/attributes.py
+++ b/src/sentry/issues/attributes.py
@@ -1,8 +1,8 @@
import dataclasses
import logging
+from collections.abc import Iterable
from datetime import datetime
from enum import Enum
-from typing import cast
import urllib3
from arroyo import Topic as ArroyoTopic
@@ -93,7 +93,7 @@ def bulk_send_snapshot_values(
if group_ids is None and groups is None:
raise ValueError("cannot send snapshot values when group_ids and groups are None")
- group_list: list[Group | GroupValues] = cast(list[Group | GroupValues], groups) or []
+ group_list: list[Group | GroupValues] = [*(groups or [])]
if group_ids:
group_list.extend(_bulk_retrieve_group_values(group_ids))
@@ -128,10 +128,6 @@ def produce_snapshot_to_kafka(snapshot: GroupAttributesSnapshot) -> None:
)
-def _retrieve_group_values(group_id: int) -> GroupValues:
- return _bulk_retrieve_group_values([group_id])[0]
-
-
def _bulk_retrieve_group_values(group_ids: list[int]) -> list[GroupValues]:
group_values_map = {
group["id"]: group
@@ -167,7 +163,7 @@ def _bulk_retrieve_group_values(group_ids: list[int]) -> list[GroupValues]:
def _bulk_retrieve_snapshot_values(
- group_values_list: list[Group | GroupValues], group_deleted: bool = False
+ group_values_list: Iterable[Group | GroupValues], group_deleted: bool = False
) -> list[GroupAttributesSnapshot]:
group_assignee_map = {
ga["group_id"]: ga
diff --git a/src/sentry/issues/endpoints/__init__.py b/src/sentry/issues/endpoints/__init__.py
index 36c255daf1830a..a1f812f708f8f0 100644
--- a/src/sentry/issues/endpoints/__init__.py
+++ b/src/sentry/issues/endpoints/__init__.py
@@ -6,7 +6,6 @@
from .group_hashes import GroupHashesEndpoint
from .group_notes import GroupNotesEndpoint
from .group_notes_details import GroupNotesDetailsEndpoint
-from .group_participants import GroupParticipantsEndpoint
from .group_similar_issues import GroupSimilarIssuesEndpoint
from .group_similar_issues_embeddings import GroupSimilarIssuesEmbeddingsEndpoint
from .group_tombstone import GroupTombstoneEndpoint
@@ -39,7 +38,6 @@
"GroupHashesEndpoint",
"GroupNotesDetailsEndpoint",
"GroupNotesEndpoint",
- "GroupParticipantsEndpoint",
"GroupSimilarIssuesEmbeddingsEndpoint",
"GroupSimilarIssuesEndpoint",
"GroupTombstoneDetailsEndpoint",
diff --git a/src/sentry/issues/endpoints/actionable_items.py b/src/sentry/issues/endpoints/actionable_items.py
index 7e860fc7575b86..30087fb93faef8 100644
--- a/src/sentry/issues/endpoints/actionable_items.py
+++ b/src/sentry/issues/endpoints/actionable_items.py
@@ -1,5 +1,3 @@
-from typing import TypedDict
-
from rest_framework.exceptions import NotFound
from rest_framework.request import Request
from rest_framework.response import Response
@@ -19,16 +17,6 @@
from sentry.models.project import Project
-class ActionableItemResponse(TypedDict):
- type: str
- message: str
- data: dict | None
-
-
-class SourceMapProcessingResponse(TypedDict):
- errors: list[ActionableItemResponse]
-
-
@region_silo_endpoint
class ActionableItemsEndpoint(ProjectEndpoint):
"""
diff --git a/src/sentry/issues/endpoints/group_details.py b/src/sentry/issues/endpoints/group_details.py
index 40117285948b6c..a7675aa9323e23 100644
--- a/src/sentry/issues/endpoints/group_details.py
+++ b/src/sentry/issues/endpoints/group_details.py
@@ -18,7 +18,7 @@
delete_group_list,
get_first_last_release,
prep_search,
- update_groups,
+ update_groups_with_search_fn,
)
from sentry.api.serializers import GroupSerializer, GroupSerializerSnuba, serialize
from sentry.api.serializers.models.group_stream import get_actions, get_available_issue_plugins
@@ -83,14 +83,11 @@ class GroupDetailsEndpoint(GroupEndpoint, EnvironmentMixin):
},
}
- def _get_activity(self, request: Request, group, num):
- return Activity.objects.get_activities_for_group(group, num)
-
- def _get_seen_by(self, request: Request, group):
+ def _get_seen_by(self, request: Request, group: Group):
seen_by = list(GroupSeen.objects.filter(group=group).order_by("-last_seen"))
return [seen for seen in serialize(seen_by, request.user) if seen is not None]
- def _get_context_plugins(self, request: Request, group):
+ def _get_context_plugins(self, request: Request, group: Group):
project = group.project
return serialize(
[
@@ -105,7 +102,9 @@ def _get_context_plugins(self, request: Request, group):
)
@staticmethod
- def __group_hourly_daily_stats(group: Group, environment_ids: Sequence[int]):
+ def __group_hourly_daily_stats(
+ group: Group, environment_ids: Sequence[int]
+ ) -> tuple[list[list[float]], list[list[float]]]:
model = get_issue_tsdb_group_model(group.issue_category)
now = timezone.now()
hourly_stats = tsdb.backend.rollup(
@@ -133,7 +132,7 @@ def __group_hourly_daily_stats(group: Group, environment_ids: Sequence[int]):
return hourly_stats, daily_stats
- def get(self, request: Request, group) -> Response:
+ def get(self, request: Request, group: Group) -> Response:
"""
Retrieve an Issue
`````````````````
@@ -164,7 +163,7 @@ def get(self, request: Request, group) -> Response:
)
# TODO: these probably should be another endpoint
- activity = self._get_activity(request, group, num=100)
+ activity = Activity.objects.get_activities_for_group(group, 100)
seen_by = self._get_seen_by(request, group)
if "release" not in collapse:
@@ -317,7 +316,7 @@ def get(self, request: Request, group) -> Response:
)
raise
- def put(self, request: Request, group) -> Response:
+ def put(self, request: Request, group: Group) -> Response:
"""
Update an Issue
```````````````
@@ -329,6 +328,11 @@ def put(self, request: Request, group) -> Response:
:param string status: the new status for the issue. Valid values
are ``"resolved"``, ``resolvedInNextRelease``,
``"unresolved"``, and ``"ignored"``.
+ :param map statusDetails: additional details about the resolution.
+ Valid values are ``"inRelease"``, ``"inNextRelease"``,
+ ``"inCommit"``, ``"ignoreDuration"``, ``"ignoreCount"``,
+ ``"ignoreWindow"``, ``"ignoreUserCount"``, and
+ ``"ignoreUserWindow"``.
:param string assignedTo: the user or team that should be assigned to
this issue. Can be of the form ``""``,
``"user:"``, ``""``,
@@ -351,7 +355,7 @@ def put(self, request: Request, group) -> Response:
discard = request.data.get("discard")
project = group.project
search_fn = functools.partial(prep_search, self, request, project)
- response = update_groups(
+ response = update_groups_with_search_fn(
request, [group.id], [project], project.organization_id, search_fn
)
# if action was discard, there isn't a group to serialize anymore
diff --git a/src/sentry/issues/endpoints/group_event_details.py b/src/sentry/issues/endpoints/group_event_details.py
index fffb47f40ea3b9..686b7b20b69c6a 100644
--- a/src/sentry/issues/endpoints/group_event_details.py
+++ b/src/sentry/issues/endpoints/group_event_details.py
@@ -4,11 +4,11 @@
from collections.abc import Sequence
from django.contrib.auth.models import AnonymousUser
-from drf_spectacular.types import OpenApiTypes
-from drf_spectacular.utils import OpenApiParameter, extend_schema
+from drf_spectacular.utils import extend_schema
+from rest_framework.exceptions import ParseError
from rest_framework.request import Request
from rest_framework.response import Response
-from snuba_sdk import Condition, Or
+from snuba_sdk import Column, Condition, Op, Or
from snuba_sdk.legacy import is_condition, parse_condition
from sentry import eventstore
@@ -19,6 +19,7 @@
from sentry.api.helpers.group_index import parse_and_convert_issue_search_query
from sentry.api.helpers.group_index.validators import ValidationError
from sentry.api.serializers import EventSerializer, serialize
+from sentry.api.utils import get_date_range_from_params
from sentry.apidocs.constants import (
RESPONSE_BAD_REQUEST,
RESPONSE_FORBIDDEN,
@@ -26,9 +27,10 @@
RESPONSE_UNAUTHORIZED,
)
from sentry.apidocs.examples.event_examples import EventExamples
-from sentry.apidocs.parameters import GlobalParams, IssueParams
+from sentry.apidocs.parameters import EventParams, GlobalParams, IssueParams
from sentry.apidocs.utils import inline_sentry_response_serializer
from sentry.eventstore.models import Event, GroupEvent
+from sentry.exceptions import InvalidParams
from sentry.issues.endpoints.project_event_details import (
GroupEventDetailsResponse,
wrap_event_response,
@@ -49,7 +51,7 @@
def issue_search_query_to_conditions(
query: str, group: Group, user: User | AnonymousUser, environments: Sequence[Environment]
-) -> Sequence[Condition]:
+) -> list[Condition]:
from sentry.utils.snuba import resolve_column, resolve_conditions
dataset = (
@@ -135,14 +137,7 @@ class GroupEventDetailsEndpoint(GroupEndpoint):
IssueParams.ISSUES_OR_GROUPS,
IssueParams.ISSUE_ID,
GlobalParams.ENVIRONMENT,
- OpenApiParameter(
- name="event_id",
- type=OpenApiTypes.STR,
- location=OpenApiParameter.PATH,
- description="The ID of the event to retrieve, or 'latest', 'oldest', or 'recommended'.",
- required=True,
- enum=["latest", "oldest", "recommended"],
- ),
+ EventParams.EVENT_ID_EXTENDED,
],
responses={
200: inline_sentry_response_serializer(
@@ -159,56 +154,75 @@ def get(self, request: Request, group: Group, event_id: str) -> Response:
"""
Retrieves the details of an issue event.
"""
- environments = [e for e in get_environments(request, group.project.organization)]
+ organization = group.project.organization
+ environments = [e for e in get_environments(request, organization)]
environment_names = [e.name for e in environments]
+ try:
+ start, end = get_date_range_from_params(request.GET, optional=True)
+ except InvalidParams:
+ raise ParseError(detail="Invalid date range")
+
+ query = request.GET.get("query")
+ try:
+ conditions: list[Condition] = (
+ issue_search_query_to_conditions(query, group, request.user, environments)
+ if query
+ else []
+ )
+ except ValidationError:
+ raise ParseError(detail="Invalid event query")
+ except Exception:
+ logging.exception(
+ "group_event_details.parse_query",
+ extra={"query": query, "group": group.id, "organization": organization.id},
+ )
+ raise ParseError(detail="Unable to parse query")
+
+ if environments:
+ conditions.append(Condition(Column("environment"), Op.IN, environment_names))
+
+ metric = "api.endpoints.group_event_details.get"
+ error_response = {"detail": "Unable to apply query. Change or remove it and try again."}
+
+ event: Event | GroupEvent | None = None
+
if event_id == "latest":
- with metrics.timer("api.endpoints.group_event_details.get", tags={"type": "latest"}):
- event: Event | GroupEvent | None = group.get_latest_event_for_environments(
- environment_names
- )
+ with metrics.timer(metric, tags={"type": "latest", "query": bool(query)}):
+ try:
+ event = group.get_latest_event(conditions=conditions, start=start, end=end)
+ except ValueError:
+ return Response(error_response, status=400)
+
elif event_id == "oldest":
- with metrics.timer("api.endpoints.group_event_details.get", tags={"type": "oldest"}):
- event = group.get_oldest_event_for_environments(environment_names)
+ with metrics.timer(metric, tags={"type": "oldest", "query": bool(query)}):
+ try:
+ event = group.get_oldest_event(conditions=conditions, start=start, end=end)
+ except ValueError:
+ return Response(error_response, status=400)
+
elif event_id == "recommended":
- query = request.GET.get("query")
- if query:
- with metrics.timer(
- "api.endpoints.group_event_details.get",
- tags={"type": "helpful", "query": True},
- ):
- try:
- conditions = issue_search_query_to_conditions(
- query, group, request.user, environments
- )
- event = group.get_recommended_event_for_environments(
- environments, conditions
- )
- except ValidationError:
- return Response(status=400)
- except Exception:
- logging.exception(
- "group_event_details:get_helpful",
- )
- return Response(status=500)
- else:
- with metrics.timer(
- "api.endpoints.group_event_details.get",
- tags={"type": "helpful", "query": False},
- ):
- event = group.get_recommended_event_for_environments(environments)
+ with metrics.timer(metric, tags={"type": "helpful", "query": bool(query)}):
+ try:
+ event = group.get_recommended_event(conditions=conditions, start=start, end=end)
+ except ValueError:
+ return Response(error_response, status=400)
+
else:
- with metrics.timer("api.endpoints.group_event_details.get", tags={"type": "event"}):
+ with metrics.timer(metric, tags={"type": "event"}):
event = eventstore.backend.get_event_by_id(
- group.project.id, event_id, group_id=group.id
+ project_id=group.project.id, event_id=event_id, group_id=group.id
)
- # TODO: Remove `for_group` check once performance issues are moved to the issue platform
-
- if event is not None and hasattr(event, "for_group") and event.group:
+ if isinstance(event, Event) and event.group:
event = event.for_group(event.group)
if event is None:
- return Response({"detail": "Event not found"}, status=404)
+ error_text = (
+ "Event not found. The event ID may be incorrect, or its age exceeded the retention period."
+ if event_id not in {"recommended", "latest", "oldest"}
+ else "No matching event found. Try changing the environments, date range, or query."
+ )
+ return Response({"detail": error_text}, status=404)
collapse = request.GET.getlist("collapse", [])
if "stacktraceOnly" in collapse:
diff --git a/src/sentry/issues/endpoints/group_events.py b/src/sentry/issues/endpoints/group_events.py
index dc3e23c1fe8c3d..aed34a05bab13f 100644
--- a/src/sentry/issues/endpoints/group_events.py
+++ b/src/sentry/issues/endpoints/group_events.py
@@ -5,8 +5,7 @@
from typing import TYPE_CHECKING, Any
from django.utils import timezone
-from drf_spectacular.types import OpenApiTypes
-from drf_spectacular.utils import OpenApiParameter, extend_schema
+from drf_spectacular.utils import extend_schema
from rest_framework.exceptions import ParseError
from rest_framework.request import Request
from rest_framework.response import Response
@@ -30,7 +29,7 @@
RESPONSE_UNAUTHORIZED,
)
from sentry.apidocs.examples.event_examples import EventExamples
-from sentry.apidocs.parameters import GlobalParams, IssueParams
+from sentry.apidocs.parameters import EventParams, GlobalParams, IssueParams
from sentry.apidocs.utils import inline_sentry_response_serializer
from sentry.eventstore.models import Event
from sentry.exceptions import InvalidParams, InvalidSearchQuery
@@ -68,27 +67,9 @@ class GroupEventsEndpoint(GroupEndpoint, EnvironmentMixin):
GlobalParams.END,
GlobalParams.STATS_PERIOD,
GlobalParams.ENVIRONMENT,
- OpenApiParameter(
- name="full",
- type=OpenApiTypes.BOOL,
- location=OpenApiParameter.QUERY,
- description="Specify true to include the full event body, including the stacktrace, in the event payload.",
- required=False,
- ),
- OpenApiParameter(
- name="sample",
- type=OpenApiTypes.BOOL,
- location=OpenApiParameter.QUERY,
- description="Return events in pseudo-random order. This is deterministic so an identical query will always return the same events in the same order.",
- required=False,
- ),
- OpenApiParameter(
- name="query",
- location=OpenApiParameter.QUERY,
- type=OpenApiTypes.STR,
- description="An optional search query for filtering events.",
- required=False,
- ),
+ EventParams.FULL_PAYLOAD,
+ EventParams.SAMPLE,
+ EventParams.QUERY,
],
responses={
200: inline_sentry_response_serializer(
diff --git a/src/sentry/issues/endpoints/group_hashes.py b/src/sentry/issues/endpoints/group_hashes.py
index 73c5104ec47d8d..69a095e4824e90 100644
--- a/src/sentry/issues/endpoints/group_hashes.py
+++ b/src/sentry/issues/endpoints/group_hashes.py
@@ -9,6 +9,7 @@
from sentry.api.bases import GroupEndpoint
from sentry.api.paginator import GenericOffsetPaginator
from sentry.api.serializers import EventSerializer, SimpleEventSerializer, serialize
+from sentry.models.group import Group
from sentry.models.grouphash import GroupHash
from sentry.tasks.unmerge import unmerge
from sentry.utils import metrics
@@ -22,7 +23,7 @@ class GroupHashesEndpoint(GroupEndpoint):
"GET": ApiPublishStatus.PRIVATE,
}
- def get(self, request: Request, group) -> Response:
+ def get(self, request: Request, group: Group) -> Response:
"""
List an Issue's Hashes
``````````````````````
@@ -59,7 +60,7 @@ def get(self, request: Request, group) -> Response:
paginator=GenericOffsetPaginator(data_fn=data_fn),
)
- def put(self, request: Request, group) -> Response:
+ def put(self, request: Request, group: Group) -> Response:
"""
Perform an unmerge by reassigning events with hash values corresponding to the given
grouphash ids from being part of the given group to being part of a new group.
diff --git a/src/sentry/issues/endpoints/group_notes.py b/src/sentry/issues/endpoints/group_notes.py
index 4a76e5b45fe107..8f0b65ecf5edce 100644
--- a/src/sentry/issues/endpoints/group_notes.py
+++ b/src/sentry/issues/endpoints/group_notes.py
@@ -11,6 +11,7 @@
from sentry.api.serializers import serialize
from sentry.api.serializers.rest_framework.group_notes import NoteSerializer
from sentry.models.activity import Activity
+from sentry.models.group import Group
from sentry.models.groupsubscription import GroupSubscription
from sentry.notifications.types import GroupSubscriptionReason
from sentry.signals import comment_created
@@ -25,7 +26,7 @@ class GroupNotesEndpoint(GroupEndpoint):
"POST": ApiPublishStatus.UNKNOWN,
}
- def get(self, request: AuthenticatedHttpRequest, group) -> Response:
+ def get(self, request: AuthenticatedHttpRequest, group: Group) -> Response:
notes = Activity.objects.filter(group=group, type=ActivityType.NOTE.value)
return self.paginate(
@@ -36,7 +37,7 @@ def get(self, request: AuthenticatedHttpRequest, group) -> Response:
on_results=lambda x: serialize(x, request.user),
)
- def post(self, request: AuthenticatedHttpRequest, group) -> Response:
+ def post(self, request: AuthenticatedHttpRequest, group: Group) -> Response:
serializer = NoteSerializer(
data=request.data,
context={
diff --git a/src/sentry/issues/endpoints/group_notes_details.py b/src/sentry/issues/endpoints/group_notes_details.py
index 7097802a0f0f05..74192125336738 100644
--- a/src/sentry/issues/endpoints/group_notes_details.py
+++ b/src/sentry/issues/endpoints/group_notes_details.py
@@ -10,6 +10,7 @@
from sentry.api.serializers import serialize
from sentry.api.serializers.rest_framework.group_notes import NoteSerializer
from sentry.models.activity import Activity
+from sentry.models.group import Group
from sentry.models.groupsubscription import GroupSubscription
from sentry.notifications.types import GroupSubscriptionReason
from sentry.signals import comment_deleted, comment_updated
@@ -27,7 +28,7 @@ class GroupNotesDetailsEndpoint(GroupEndpoint):
# since an ApiKey is bound to the Organization, not
# an individual. Not sure if we'd want to allow an ApiKey
# to delete/update other users' comments
- def delete(self, request: Request, group, note_id) -> Response:
+ def delete(self, request: Request, group: Group, note_id: str) -> Response:
if not request.user.is_authenticated:
raise PermissionDenied(detail="Key doesn't have permission to delete Note")
@@ -69,7 +70,7 @@ def delete(self, request: Request, group, note_id) -> Response:
return Response(status=204)
- def put(self, request: Request, group, note_id) -> Response:
+ def put(self, request: Request, group: Group, note_id: str) -> Response:
if not request.user.is_authenticated:
raise PermissionDenied(detail="Key doesn't have permission to edit Note")
diff --git a/src/sentry/issues/endpoints/group_participants.py b/src/sentry/issues/endpoints/group_participants.py
deleted file mode 100644
index 1662be566b0b9b..00000000000000
--- a/src/sentry/issues/endpoints/group_participants.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from __future__ import annotations
-
-from rest_framework.request import Request
-from rest_framework.response import Response
-
-from sentry.api.api_publish_status import ApiPublishStatus
-from sentry.api.base import region_silo_endpoint
-from sentry.api.bases import GroupEndpoint
-from sentry.models.group import Group
-from sentry.models.groupsubscription import GroupSubscriptionManager
-from sentry.users.services.user.service import user_service
-
-
-@region_silo_endpoint
-class GroupParticipantsEndpoint(GroupEndpoint):
- publish_status = {
- "GET": ApiPublishStatus.UNKNOWN,
- }
-
- def get(self, request: Request, group: Group) -> Response:
- participants = GroupSubscriptionManager.get_participating_user_ids(group)
-
- return Response(
- user_service.serialize_many(filter={"user_ids": participants}, as_user=request.user)
- )
diff --git a/src/sentry/issues/endpoints/group_similar_issues.py b/src/sentry/issues/endpoints/group_similar_issues.py
index cc7d8cedf49367..a5fb4f59a40171 100644
--- a/src/sentry/issues/endpoints/group_similar_issues.py
+++ b/src/sentry/issues/endpoints/group_similar_issues.py
@@ -13,7 +13,7 @@
logger = logging.getLogger(__name__)
-def _fix_label(label):
+def _fix_label(label) -> str:
if isinstance(label, tuple):
return ":".join(label)
return label
@@ -25,7 +25,7 @@ class GroupSimilarIssuesEndpoint(GroupEndpoint):
"GET": ApiPublishStatus.PRIVATE,
}
- def get(self, request: Request, group) -> Response:
+ def get(self, request: Request, group: Group) -> Response:
features = similarity.features
limit_s = request.GET.get("limit", None)
@@ -54,13 +54,13 @@ def get(self, request: Request, group) -> Response:
# We need to preserve the ordering of the Redis results, as that
# ordering is directly shown in the UI
for group_id, scores in zip(group_ids, group_scores):
- group = serialized_groups.get(group_id)
- if group is None:
+ serialized_group = serialized_groups.get(group_id)
+ if serialized_group is None:
# TODO(tkaemming): This should log when we filter out a group that is
# unable to be retrieved from the database. (This will soon be
# unexpected behavior, but still possible.)
continue
- results.append((group, {_fix_label(k): v for k, v in scores.items()}))
+ results.append((serialized_group, {_fix_label(k): v for k, v in scores.items()}))
return Response(results)
diff --git a/src/sentry/issues/endpoints/group_similar_issues_embeddings.py b/src/sentry/issues/endpoints/group_similar_issues_embeddings.py
index b51e57e3f8b760..fb46b54b8381da 100644
--- a/src/sentry/issues/endpoints/group_similar_issues_embeddings.py
+++ b/src/sentry/issues/endpoints/group_similar_issues_embeddings.py
@@ -12,22 +12,23 @@
from sentry.api.base import region_silo_endpoint
from sentry.api.bases.group import GroupEndpoint
from sentry.api.serializers import serialize
-from sentry.grouping.grouping_info import get_grouping_info
+from sentry.grouping.grouping_info import get_grouping_info_from_variants
from sentry.models.group import Group
from sentry.models.grouphash import GroupHash
from sentry.seer.similarity.similar_issues import get_similarity_data_from_seer
from sentry.seer.similarity.types import SeerSimilarIssueData, SimilarIssuesEmbeddingsRequest
from sentry.seer.similarity.utils import (
+ ReferrerOptions,
TooManyOnlySystemFramesException,
event_content_has_stacktrace,
get_stacktrace_string,
+ has_too_many_contributing_frames,
killswitch_enabled,
)
from sentry.users.models.user import User
from sentry.utils.safe import get_path
logger = logging.getLogger(__name__)
-MAX_FRAME_COUNT = 50
class FormattedSimilarIssuesEmbeddingsData(TypedDict):
@@ -42,24 +43,26 @@ class GroupSimilarIssuesEmbeddingsEndpoint(GroupEndpoint):
"GET": ApiPublishStatus.PRIVATE,
}
- def get_group_hashes_for_group_id(self, group_id: int) -> set[str]:
- hashes = GroupHash.objects.filter(group_id=group_id)
- return {hash.hash for hash in hashes}
-
def get_formatted_results(
self,
similar_issues_data: Sequence[SeerSimilarIssueData],
user: User | AnonymousUser,
- group_id: int,
+ group: Group,
) -> Sequence[tuple[Mapping[str, Any], Mapping[str, Any]] | None]:
"""
Format the responses using to be used by the frontend by changing the field names and
changing the cosine distances into cosine similarities.
"""
- hashes = self.get_group_hashes_for_group_id(group_id)
group_data = {}
+ parent_hashes = [
+ similar_issue_data.parent_hash for similar_issue_data in similar_issues_data
+ ]
+ group_hashes = GroupHash.objects.filter(project_id=group.project_id, hash__in=parent_hashes)
+ parent_hashes_group_ids = {
+ group_hash.hash: group_hash.group_id for group_hash in group_hashes
+ }
for similar_issue_data in similar_issues_data:
- if similar_issue_data.parent_hash not in hashes:
+ if parent_hashes_group_ids[similar_issue_data.parent_hash] != group.id:
formatted_response: FormattedSimilarIssuesEmbeddingsData = {
"exception": round(1 - similar_issue_data.stacktrace_distance, 4),
"shouldBeGrouped": "Yes" if similar_issue_data.should_group else "No",
@@ -76,21 +79,27 @@ def get_formatted_results(
return [(serialized_groups[group_id], group_data[group_id]) for group_id in group_data]
def get(self, request: Request, group: Group) -> Response:
- if killswitch_enabled(group.project.id):
+ if killswitch_enabled(group.project.id, ReferrerOptions.SIMILAR_ISSUES_TAB):
return Response([])
latest_event = group.get_latest_event()
stacktrace_string = ""
+
if latest_event and event_content_has_stacktrace(latest_event):
- grouping_info = get_grouping_info(None, project=group.project, event=latest_event)
- try:
- stacktrace_string = get_stacktrace_string(
- grouping_info, platform=latest_event.platform
- )
- except TooManyOnlySystemFramesException:
- pass
- except Exception:
- logger.exception("Unexpected exception in stacktrace string formatting")
+ variants = latest_event.get_grouping_variants(normalize_stacktraces=True)
+
+ if not has_too_many_contributing_frames(
+ latest_event, variants, ReferrerOptions.SIMILAR_ISSUES_TAB
+ ):
+ grouping_info = get_grouping_info_from_variants(variants)
+ try:
+ stacktrace_string = get_stacktrace_string(
+ grouping_info, platform=latest_event.platform
+ )
+ except TooManyOnlySystemFramesException:
+ pass
+ except Exception:
+ logger.exception("Unexpected exception in stacktrace string formatting")
if not stacktrace_string or not latest_event:
return Response([]) # No exception, stacktrace or in-app frames, or event
@@ -138,6 +147,6 @@ def get(self, request: Request, group: Group) -> Response:
if not results:
return Response([])
- formatted_results = self.get_formatted_results(results, request.user, group.id)
+ formatted_results = self.get_formatted_results(results, request.user, group)
return Response(formatted_results)
diff --git a/src/sentry/issues/endpoints/group_tombstone.py b/src/sentry/issues/endpoints/group_tombstone.py
index ac5c2faee71b1d..cf9ad9e38148d3 100644
--- a/src/sentry/issues/endpoints/group_tombstone.py
+++ b/src/sentry/issues/endpoints/group_tombstone.py
@@ -8,6 +8,7 @@
from sentry.api.paginator import OffsetPaginator
from sentry.api.serializers import serialize
from sentry.models.grouptombstone import GroupTombstone
+from sentry.models.project import Project
@region_silo_endpoint
@@ -17,7 +18,7 @@ class GroupTombstoneEndpoint(ProjectEndpoint):
"GET": ApiPublishStatus.PRIVATE,
}
- def get(self, request: Request, project) -> Response:
+ def get(self, request: Request, project: Project) -> Response:
"""
Retrieve a Project's GroupTombstones
````````````````````````````````````
diff --git a/src/sentry/issues/endpoints/group_tombstone_details.py b/src/sentry/issues/endpoints/group_tombstone_details.py
index addd62e9489545..f2aa03517e6698 100644
--- a/src/sentry/issues/endpoints/group_tombstone_details.py
+++ b/src/sentry/issues/endpoints/group_tombstone_details.py
@@ -8,6 +8,7 @@
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.models.grouphash import GroupHash
from sentry.models.grouptombstone import GroupTombstone
+from sentry.models.project import Project
@region_silo_endpoint
@@ -17,7 +18,7 @@ class GroupTombstoneDetailsEndpoint(ProjectEndpoint):
"DELETE": ApiPublishStatus.PRIVATE,
}
- def delete(self, request: Request, project, tombstone_id) -> Response:
+ def delete(self, request: Request, project: Project, tombstone_id: str) -> Response:
"""
Remove a GroupTombstone
```````````````````````
diff --git a/src/sentry/issues/endpoints/organization_eventid.py b/src/sentry/issues/endpoints/organization_eventid.py
index 3fddf7346d45c5..929cbd156b90ba 100644
--- a/src/sentry/issues/endpoints/organization_eventid.py
+++ b/src/sentry/issues/endpoints/organization_eventid.py
@@ -8,6 +8,7 @@
from sentry.api.bases.organization import OrganizationEndpoint
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.serializers import serialize
+from sentry.models.organization import Organization
from sentry.models.project import Project
from sentry.types.ratelimit import RateLimit, RateLimitCategory
from sentry.utils.validators import INVALID_ID_DETAILS, is_event_id
@@ -28,7 +29,7 @@ class EventIdLookupEndpoint(OrganizationEndpoint):
}
}
- def get(self, request: Request, organization, event_id) -> Response:
+ def get(self, request: Request, organization: Organization, event_id: str) -> Response:
"""
Resolve an Event ID
``````````````````
diff --git a/src/sentry/issues/endpoints/organization_group_index.py b/src/sentry/issues/endpoints/organization_group_index.py
index 3d53a2bc19f4dd..08040c40b1c9ec 100644
--- a/src/sentry/issues/endpoints/organization_group_index.py
+++ b/src/sentry/issues/endpoints/organization_group_index.py
@@ -23,7 +23,7 @@
delete_groups,
get_by_short_id,
track_slo_response,
- update_groups,
+ update_groups_with_search_fn,
)
from sentry.api.helpers.group_index.validators import ValidationError
from sentry.api.paginator import DateTimePaginator, Paginator
@@ -36,6 +36,7 @@
from sentry.models.group import QUERY_STATUS_LOOKUP, Group, GroupStatus
from sentry.models.groupenvironment import GroupEnvironment
from sentry.models.groupinbox import GroupInbox
+from sentry.models.organization import Organization
from sentry.models.project import Project
from sentry.search.events.constants import EQUALITY_OPERATORS
from sentry.search.snuba.backend import assigned_or_suggested_filter
@@ -59,7 +60,7 @@ def inbox_search(
date_to: datetime | None = None,
max_hits: int | None = None,
actor: Any | None = None,
-) -> CursorResult:
+) -> CursorResult[Group]:
now: datetime = timezone.now()
end: datetime | None = None
end_params: list[datetime] = [
@@ -151,8 +152,13 @@ class OrganizationGroupIndexEndpoint(OrganizationEndpoint):
enforce_rate_limit = True
def _search(
- self, request: Request, organization, projects, environments, extra_query_kwargs=None
- ):
+ self,
+ request: Request,
+ organization: Organization,
+ projects: Sequence[Project],
+ environments: Sequence[Environment],
+ extra_query_kwargs: None | Mapping[str, Any] = None,
+ ) -> tuple[CursorResult[Group], Mapping[str, Any]]:
with start_span(op="_search"):
query_kwargs = build_query_params_from_request(
request, organization, projects, environments
@@ -201,7 +207,7 @@ def use_group_snuba_dataset() -> bool:
return result, query_kwargs
@track_slo_response("workflow")
- def get(self, request: Request, organization) -> Response:
+ def get(self, request: Request, organization: Organization) -> Response:
"""
List an Organization's Issues
`````````````````````````````
@@ -406,7 +412,7 @@ def get(self, request: Request, organization) -> Response:
return response
@track_slo_response("workflow")
- def put(self, request: Request, organization) -> Response:
+ def put(self, request: Request, organization: Organization) -> Response:
"""
Bulk Mutate a List of Issues
````````````````````````````
@@ -490,10 +496,10 @@ def put(self, request: Request, organization) -> Response:
)
ids = [int(id) for id in request.GET.getlist("id")]
- return update_groups(request, ids, projects, organization.id, search_fn)
+ return update_groups_with_search_fn(request, ids, projects, organization.id, search_fn)
@track_slo_response("workflow")
- def delete(self, request: Request, organization) -> Response:
+ def delete(self, request: Request, organization: Organization) -> Response:
"""
Bulk Remove a List of Issues
````````````````````````````
diff --git a/src/sentry/issues/endpoints/organization_shortid.py b/src/sentry/issues/endpoints/organization_shortid.py
index c7c50db957f054..0f08a7e6d867f8 100644
--- a/src/sentry/issues/endpoints/organization_shortid.py
+++ b/src/sentry/issues/endpoints/organization_shortid.py
@@ -8,6 +8,7 @@
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.serializers import serialize
from sentry.models.group import Group
+from sentry.models.organization import Organization
@region_silo_endpoint
@@ -17,7 +18,7 @@ class ShortIdLookupEndpoint(OrganizationEndpoint):
"GET": ApiPublishStatus.UNKNOWN,
}
- def get(self, request: Request, organization, short_id) -> Response:
+ def get(self, request: Request, organization: Organization, short_id: str) -> Response:
"""
Resolve a Short ID
``````````````````
diff --git a/src/sentry/issues/endpoints/project_event_details.py b/src/sentry/issues/endpoints/project_event_details.py
index b3c1650db7f8f9..e477e4701b41f6 100644
--- a/src/sentry/issues/endpoints/project_event_details.py
+++ b/src/sentry/issues/endpoints/project_event_details.py
@@ -1,6 +1,7 @@
from datetime import datetime
from typing import Any
+import sentry_sdk
from rest_framework.request import Request
from rest_framework.response import Response
@@ -12,6 +13,7 @@
from sentry.api.serializers import IssueEventSerializer, serialize
from sentry.api.serializers.models.event import IssueEventSerializerResponse
from sentry.eventstore.models import Event, GroupEvent
+from sentry.models.project import Project
class GroupEventDetailsResponse(IssueEventSerializerResponse):
@@ -72,7 +74,7 @@ class ProjectEventDetailsEndpoint(ProjectEndpoint):
"GET": ApiPublishStatus.EXPERIMENTAL,
}
- def get(self, request: Request, project, event_id) -> Response:
+ def get(self, request: Request, project: Project, event_id: str) -> Response:
"""
Retrieve an Event for a Project
```````````````````````````````
@@ -120,7 +122,7 @@ class EventJsonEndpoint(ProjectEndpoint):
"GET": ApiPublishStatus.EXPERIMENTAL,
}
- def get(self, request: Request, project, event_id) -> Response:
+ def get(self, request: Request, project: Project, event_id: str) -> Response:
event = eventstore.backend.get_event_by_id(project.id, event_id)
if not event:
@@ -130,4 +132,23 @@ def get(self, request: Request, project, event_id) -> Response:
if isinstance(event_dict["datetime"], datetime):
event_dict["datetime"] = event_dict["datetime"].isoformat()
+ try:
+ scrub_ip_addresses = project.organization.get_option(
+ "sentry:require_scrub_ip_address", False
+ ) or project.get_option("sentry:scrub_ip_address", False)
+
+ if scrub_ip_addresses:
+ if "spans" in event_dict:
+ for span in event_dict["spans"]:
+ if "sentry_tags" not in span:
+ continue
+ if "user.ip" in span["sentry_tags"]:
+ del span["sentry_tags"]["user.ip"]
+ if "user" in span["sentry_tags"] and span["sentry_tags"]["user"].startswith(
+ "ip:"
+ ):
+ span["sentry_tags"]["user"] = "ip:[ip]"
+ except Exception as e:
+ sentry_sdk.capture_exception(e)
+
return Response(event_dict, status=200)
diff --git a/src/sentry/issues/endpoints/project_events.py b/src/sentry/issues/endpoints/project_events.py
index 63e830ee08fde4..66d35d105d0cef 100644
--- a/src/sentry/issues/endpoints/project_events.py
+++ b/src/sentry/issues/endpoints/project_events.py
@@ -2,7 +2,7 @@
from functools import partial
from django.utils import timezone
-from drf_spectacular.utils import OpenApiParameter, extend_schema
+from drf_spectacular.utils import extend_schema
from rest_framework.request import Request
from rest_framework.response import Response
@@ -15,7 +15,7 @@
from sentry.api.serializers.models.event import SimpleEventSerializerResponse
from sentry.apidocs.constants import RESPONSE_FORBIDDEN, RESPONSE_NOT_FOUND, RESPONSE_UNAUTHORIZED
from sentry.apidocs.examples.event_examples import EventExamples
-from sentry.apidocs.parameters import CursorQueryParam, GlobalParams
+from sentry.apidocs.parameters import CursorQueryParam, EventParams, GlobalParams
from sentry.apidocs.utils import inline_sentry_response_serializer
from sentry.models.project import Project
from sentry.snuba.events import Columns
@@ -44,22 +44,8 @@ class ProjectEventsEndpoint(ProjectEndpoint):
GlobalParams.ORG_ID_OR_SLUG,
GlobalParams.PROJECT_ID_OR_SLUG,
CursorQueryParam,
- OpenApiParameter(
- name="full",
- description="If this is set to true, the event payload will include the full event body, including the stacktrace. Set to 1 to enable.",
- required=False,
- type=bool,
- location="query",
- default=False,
- ),
- OpenApiParameter(
- name="sample",
- description="Return events in pseudo-random order. This is deterministic so an identical query will always return the same events in the same order.",
- required=False,
- type=bool,
- location="query",
- default=False,
- ),
+ EventParams.FULL_PAYLOAD,
+ EventParams.SAMPLE,
],
responses={
200: inline_sentry_response_serializer(
diff --git a/src/sentry/issues/endpoints/project_group_index.py b/src/sentry/issues/endpoints/project_group_index.py
index 06a5eb2b3c5b78..8f5c9d956da839 100644
--- a/src/sentry/issues/endpoints/project_group_index.py
+++ b/src/sentry/issues/endpoints/project_group_index.py
@@ -13,7 +13,7 @@
get_by_short_id,
prep_search,
track_slo_response,
- update_groups,
+ update_groups_with_search_fn,
)
from sentry.api.helpers.group_index.validators import ValidationError
from sentry.api.serializers import serialize
@@ -21,6 +21,7 @@
from sentry.models.environment import Environment
from sentry.models.group import QUERY_STATUS_LOOKUP, Group, GroupStatus
from sentry.models.grouphash import GroupHash
+from sentry.models.project import Project
from sentry.search.events.constants import EQUALITY_OPERATORS
from sentry.signals import advanced_search
from sentry.types.ratelimit import RateLimit, RateLimitCategory
@@ -50,7 +51,7 @@ class ProjectGroupIndexEndpoint(ProjectEndpoint, EnvironmentMixin):
}
@track_slo_response("workflow")
- def get(self, request: Request, project) -> Response:
+ def get(self, request: Request, project: Project) -> Response:
"""
List a Project's Issues
```````````````````````
@@ -208,7 +209,7 @@ def get(self, request: Request, project) -> Response:
return response
@track_slo_response("workflow")
- def put(self, request: Request, project) -> Response:
+ def put(self, request: Request, project: Project) -> Response:
"""
Bulk Mutate a List of Issues
````````````````````````````
@@ -269,7 +270,7 @@ def put(self, request: Request, project) -> Response:
"""
search_fn = functools.partial(prep_search, self, request, project)
- return update_groups(
+ return update_groups_with_search_fn(
request,
request.GET.getlist("id"),
[project],
@@ -278,7 +279,7 @@ def put(self, request: Request, project) -> Response:
)
@track_slo_response("workflow")
- def delete(self, request: Request, project) -> Response:
+ def delete(self, request: Request, project: Project) -> Response:
"""
Bulk Remove a List of Issues
````````````````````````````
diff --git a/src/sentry/issues/endpoints/project_group_stats.py b/src/sentry/issues/endpoints/project_group_stats.py
index 4a1227daf0f93c..3781a999f4a718 100644
--- a/src/sentry/issues/endpoints/project_group_stats.py
+++ b/src/sentry/issues/endpoints/project_group_stats.py
@@ -9,6 +9,7 @@
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.models.environment import Environment
from sentry.models.group import Group
+from sentry.models.project import Project
from sentry.tsdb.base import TSDBModel
from sentry.types.ratelimit import RateLimit, RateLimitCategory
@@ -28,7 +29,7 @@ class ProjectGroupStatsEndpoint(ProjectEndpoint, EnvironmentMixin, StatsMixin):
}
}
- def get(self, request: Request, project) -> Response:
+ def get(self, request: Request, project: Project) -> Response:
try:
environment_id = self._get_environment_id_from_request(request, project.organization_id)
except Environment.DoesNotExist:
diff --git a/src/sentry/issues/endpoints/source_map_debug.py b/src/sentry/issues/endpoints/source_map_debug.py
index fa07de1c795f40..a20d6008c55c02 100644
--- a/src/sentry/issues/endpoints/source_map_debug.py
+++ b/src/sentry/issues/endpoints/source_map_debug.py
@@ -78,9 +78,6 @@ def get(self, request: Request, project: Project, event_id: str) -> Response:
debug_response = source_map_debug(project, event_id, exception_idx, frame_idx)
issue, data = debug_response.issue, debug_response.data
- return self._create_response(issue, data)
-
- def _create_response(self, issue=None, data=None) -> Response:
errors_list = []
if issue:
response = SourceMapProcessingIssue(issue, data=data).get_api_context()
diff --git a/src/sentry/issues/grouptype.py b/src/sentry/issues/grouptype.py
index 7810e9f66ba712..3819bc06de9ad7 100644
--- a/src/sentry/issues/grouptype.py
+++ b/src/sentry/issues/grouptype.py
@@ -1,11 +1,12 @@
from __future__ import annotations
import importlib
+import logging
from collections import defaultdict
from dataclasses import dataclass, field
from datetime import timedelta
from enum import Enum, StrEnum
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, ClassVar
import sentry_sdk
from django.apps import apps
@@ -21,11 +22,8 @@
if TYPE_CHECKING:
from sentry.models.organization import Organization
from sentry.models.project import Project
- from sentry.users.models.user import User
- from sentry.workflow_engine.handlers.detector import DetectorHandler
from sentry.workflow_engine.endpoints.validators import BaseGroupTypeDetectorValidator
-
-import logging
+ from sentry.workflow_engine.handlers.detector import DetectorHandler
logger = logging.getLogger(__name__)
@@ -174,6 +172,10 @@ class GroupType:
notification_config: NotificationConfig = NotificationConfig()
detector_handler: type[DetectorHandler] | None = None
detector_validator: type[BaseGroupTypeDetectorValidator] | None = None
+ # Controls whether status change (i.e. resolved, regressed) workflow notifications are enabled.
+ # Defaults to true to maintain the default workflow notification behavior as it exists for error group types.
+ enable_status_change_workflow_notifications: bool = True
+ detector_config_schema: ClassVar[dict[str, Any]] = {}
def __init_subclass__(cls: type[GroupType], **kwargs: Any) -> None:
super().__init_subclass__(**kwargs)
@@ -189,13 +191,6 @@ def __post_init__(self) -> None:
if self.category not in valid_categories:
raise ValueError(f"Category must be one of {valid_categories} from GroupCategory.")
- @classmethod
- def is_visible(cls, organization: Organization, user: User | None = None) -> bool:
- if cls.released:
- return True
-
- return features.has(cls.build_visible_feature_name(), organization, actor=user)
-
@classmethod
def allow_ingest(cls, organization: Organization) -> bool:
if cls.released:
@@ -635,6 +630,7 @@ class MetricIssuePOC(GroupType):
default_priority = PriorityLevel.HIGH
enable_auto_resolve = False
enable_escalation_detection = False
+ enable_status_change_workflow_notifications = False
def should_create_group(
@@ -669,7 +665,7 @@ def should_create_group(
return False
-def import_grouptype():
+def import_grouptype() -> None:
"""
Ensures that grouptype.py is imported in any apps that implement it. We do this to make sure that all implemented
grouptypes are loaded and registered.
diff --git a/src/sentry/issues/highlights.py b/src/sentry/issues/highlights.py
index 2e05a5aabddee1..a572832919d4d6 100644
--- a/src/sentry/issues/highlights.py
+++ b/src/sentry/issues/highlights.py
@@ -7,7 +7,7 @@
from rest_framework import serializers
from sentry.models.project import Project
-from sentry.utils.platform_categories import BACKEND, FRONTEND, MOBILE
+from sentry.utils.platform_categories import MOBILE
@extend_schema_field(field=OpenApiTypes.OBJECT)
@@ -35,33 +35,22 @@ class HighlightPreset(TypedDict):
context: Mapping[str, list[str]]
-SENTRY_TAGS = ["handled", "level", "release", "environment"]
+DEFAULT_HIGHLIGHT_TAGS = ["handled", "level"]
+DEFAULT_HIGHLIGHT_CTX = {"trace": ["trace_id"]}
-BACKEND_HIGHLIGHTS: HighlightPreset = {
- "tags": SENTRY_TAGS + ["url", "transaction", "status_code"],
- "context": {"trace": ["trace_id"], "runtime": ["name", "version"]},
-}
-FRONTEND_HIGHLIGHTS: HighlightPreset = {
- "tags": SENTRY_TAGS + ["url", "transaction", "browser", "user"],
- "context": {"user": ["email"]},
-}
MOBILE_HIGHLIGHTS: HighlightPreset = {
- "tags": SENTRY_TAGS + ["mobile", "main_thread"],
- "context": {"profile": ["profile_id"], "app": ["name"], "device": ["family"]},
+ "tags": DEFAULT_HIGHLIGHT_TAGS + ["mobile", "main_thread"],
+ "context": {**DEFAULT_HIGHLIGHT_CTX, "profile": ["profile_id"], "app": ["name"]},
}
FALLBACK_HIGHLIGHTS: HighlightPreset = {
- "tags": SENTRY_TAGS,
- "context": {"user": ["email"], "trace": ["trace_id"]},
+ "tags": DEFAULT_HIGHLIGHT_TAGS + ["url"],
+ "context": {**DEFAULT_HIGHLIGHT_CTX},
}
def get_highlight_preset_for_project(project: Project) -> HighlightPreset:
if not project.platform or project.platform == "other":
return FALLBACK_HIGHLIGHTS
- elif project.platform in FRONTEND:
- return FRONTEND_HIGHLIGHTS
- elif project.platform in BACKEND:
- return BACKEND_HIGHLIGHTS
elif project.platform in MOBILE:
return MOBILE_HIGHLIGHTS
return FALLBACK_HIGHLIGHTS
diff --git a/src/sentry/issues/priority.py b/src/sentry/issues/priority.py
index c42aef02b43656..99195a307406da 100644
--- a/src/sentry/issues/priority.py
+++ b/src/sentry/issues/priority.py
@@ -30,10 +30,6 @@ class PriorityChangeReason(Enum):
PriorityLevel.LOW: GroupHistoryStatus.PRIORITY_LOW,
}
-GROUP_HISTORY_STATUS_TO_PRIORITY = {
- value: key for key, value in PRIORITY_TO_GROUP_HISTORY_STATUS.items()
-}
-
def update_priority(
group: Group,
diff --git a/src/sentry/lang/java/utils.py b/src/sentry/lang/java/utils.py
index 8fb98d52d6ff08..2d8dd6a0de0aa4 100644
--- a/src/sentry/lang/java/utils.py
+++ b/src/sentry/lang/java/utils.py
@@ -12,7 +12,6 @@
from sentry.models.debugfile import ProjectDebugFile
from sentry.models.project import Project
from sentry.stacktraces.processing import StacktraceInfo
-from sentry.utils import metrics
from sentry.utils.cache import cache_key_for_event
from sentry.utils.safe import get_path
@@ -133,8 +132,8 @@ def is_jvm_event(data: Any, stacktraces: list[StacktraceInfo]) -> bool:
return True
# check if there are any JVM or Proguard images
- # TODO: Can this actually happen if the event platform
- # is not "java"?
+ # we *do* hit this code path, likely for events that don't have platform
+ # `"java"` but contain Java view hierarchies.
images = get_path(
data,
"debug_meta",
@@ -144,8 +143,6 @@ def is_jvm_event(data: Any, stacktraces: list[StacktraceInfo]) -> bool:
)
if images:
- metrics.incr("process.java.symbolicate.missing_platform", tags={platform: platform})
-
return True
return False
diff --git a/src/sentry/lang/native/processing.py b/src/sentry/lang/native/processing.py
index 022b30d4198780..b1cef31a389698 100644
--- a/src/sentry/lang/native/processing.py
+++ b/src/sentry/lang/native/processing.py
@@ -489,6 +489,8 @@ def emit_apple_symbol_stats(apple_symbol_stats, data):
data, "contexts", "os", "raw_description"
)
os_version = get_path(data, "contexts", "os", "version")
+ # See https://develop.sentry.dev/sdk/data-model/event-payloads/contexts/
+ is_simulator = get_path(data, "contexts", "device", "simulator", default=False)
if os_version:
os_version = os_version.split(".", 1)[0]
@@ -497,19 +499,25 @@ def emit_apple_symbol_stats(apple_symbol_stats, data):
metrics.incr(
"apple_symbol_availability_v2",
amount=neither,
- tags={"availability": "neither", "os_name": os_name, "os_version": os_version},
+ tags={
+ "availability": "neither",
+ "os_name": os_name,
+ "os_version": os_version,
+ "is_simulator": is_simulator,
+ },
sample_rate=1.0,
)
- # TODO: This seems to just be wrong
- # We want mutual exclusion here, since we don't want to double count. E.g., an event has both symbols, so we
- # count it both in `both` and `old` or `symx` which makes it impossible for us to know the percentage of events
- # that matched both.
if both := apple_symbol_stats.get("both"):
metrics.incr(
"apple_symbol_availability_v2",
amount=both,
- tags={"availability": "both", "os_name": os_name, "os_version": os_version},
+ tags={
+ "availability": "both",
+ "os_name": os_name,
+ "os_version": os_version,
+ "is_simulator": is_simulator,
+ },
sample_rate=1.0,
)
@@ -517,7 +525,12 @@ def emit_apple_symbol_stats(apple_symbol_stats, data):
metrics.incr(
"apple_symbol_availability_v2",
amount=old,
- tags={"availability": "old", "os_name": os_name, "os_version": os_version},
+ tags={
+ "availability": "old",
+ "os_name": os_name,
+ "os_version": os_version,
+ "is_simulator": is_simulator,
+ },
sample_rate=1.0,
)
@@ -525,7 +538,12 @@ def emit_apple_symbol_stats(apple_symbol_stats, data):
metrics.incr(
"apple_symbol_availability_v2",
amount=symx,
- tags={"availability": "symx", "os_name": os_name, "os_version": os_version},
+ tags={
+ "availability": "symx",
+ "os_name": os_name,
+ "os_version": os_version,
+ "is_simulator": is_simulator,
+ },
sample_rate=1.0,
)
diff --git a/src/sentry/logging/handlers.py b/src/sentry/logging/handlers.py
index 2a05424f00b331..4113bbeb59be15 100644
--- a/src/sentry/logging/handlers.py
+++ b/src/sentry/logging/handlers.py
@@ -1,5 +1,8 @@
+from __future__ import annotations
+
import logging
import re
+from typing import Any
from django.utils.timezone import now
from structlog import get_logger
@@ -79,7 +82,7 @@ def __call__(self, logger, name, event_dict):
class StructLogHandler(logging.StreamHandler):
- def get_log_kwargs(self, record, logger):
+ def get_log_kwargs(self, record: logging.LogRecord) -> dict[str, Any]:
kwargs = {k: v for k, v in vars(record).items() if k not in throwaways and v is not None}
kwargs.update({"level": record.levelno, "event": record.msg})
@@ -96,7 +99,7 @@ def get_log_kwargs(self, record, logger):
return kwargs
- def emit(self, record, logger=None):
+ def emit(self, record: logging.LogRecord, logger: logging.Logger | None = None) -> None:
# If anyone wants to use the 'extra' kwarg to provide context within
# structlog, we have to strip all of the default attributes from
# a record because the RootLogger will take the 'extra' dictionary
@@ -104,12 +107,24 @@ def emit(self, record, logger=None):
try:
if logger is None:
logger = get_logger()
- logger.log(**self.get_log_kwargs(record=record, logger=logger))
+ logger.log(**self.get_log_kwargs(record=record))
except Exception:
if logging.raiseExceptions:
raise
+class GKEStructLogHandler(StructLogHandler):
+ def get_log_kwargs(self, record: logging.LogRecord) -> dict[str, Any]:
+ kwargs = super().get_log_kwargs(record)
+ kwargs.update(
+ {
+ "logging.googleapis.com/labels": {"name": kwargs.get("name", "root")},
+ "severity": record.levelname,
+ }
+ )
+ return kwargs
+
+
class MessageContainsFilter(logging.Filter):
"""
A logging filter that allows log records where the message
diff --git a/src/sentry/middleware/integrations/parsers/gitlab.py b/src/sentry/middleware/integrations/parsers/gitlab.py
index 9f48aba2f27ef8..aad4e9a4c48b71 100644
--- a/src/sentry/middleware/integrations/parsers/gitlab.py
+++ b/src/sentry/middleware/integrations/parsers/gitlab.py
@@ -8,7 +8,7 @@
from django.http.response import HttpResponseBase
from sentry.hybridcloud.outbox.category import WebhookProviderIdentifier
-from sentry.integrations.gitlab.webhooks import GitlabWebhookEndpoint, GitlabWebhookMixin
+from sentry.integrations.gitlab.webhooks import GitlabWebhookEndpoint, get_gitlab_external_id
from sentry.integrations.middleware.hybrid_cloud.parser import BaseRequestParser
from sentry.integrations.models.integration import Integration
from sentry.integrations.models.organization_integration import OrganizationIntegration
@@ -20,7 +20,7 @@
logger = logging.getLogger(__name__)
-class GitlabRequestParser(BaseRequestParser, GitlabWebhookMixin):
+class GitlabRequestParser(BaseRequestParser):
provider = EXTERNAL_PROVIDERS[ExternalProviders.GITLAB]
webhook_identifier = WebhookProviderIdentifier.GITLAB
_integration: Integration | None = None
@@ -35,7 +35,7 @@ def _resolve_external_id(self) -> tuple[str, str] | HttpResponseBase:
# AppPlatformEvents also hit this API
"event-type": self.request.META.get("HTTP_X_GITLAB_EVENT"),
}
- return super()._get_external_id(request=self.request, extra=extra)
+ return get_gitlab_external_id(request=self.request, extra=extra)
@control_silo_function
def get_integration_from_request(self) -> Integration | None:
diff --git a/src/sentry/migrations/0803_delete_unused_metricskeyindexer_pt1.py b/src/sentry/migrations/0803_delete_unused_metricskeyindexer_pt1.py
new file mode 100644
index 00000000000000..e468f091384ec2
--- /dev/null
+++ b/src/sentry/migrations/0803_delete_unused_metricskeyindexer_pt1.py
@@ -0,0 +1,29 @@
+# Generated by Django 5.1.4 on 2024-12-19 20:24
+
+from sentry.new_migrations.migrations import CheckedMigration
+from sentry.new_migrations.monkey.models import SafeDeleteModel
+from sentry.new_migrations.monkey.state import DeletionAction
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("sentry", "0802_remove_grouping_auto_update_option"),
+ ]
+
+ operations = [
+ SafeDeleteModel(name="MetricsKeyIndexer", deletion_action=DeletionAction.MOVE_TO_PENDING),
+ ]
diff --git a/src/sentry/migrations/0804_delete_metrics_key_indexer_pt2.py b/src/sentry/migrations/0804_delete_metrics_key_indexer_pt2.py
new file mode 100644
index 00000000000000..c863a8ca7f503e
--- /dev/null
+++ b/src/sentry/migrations/0804_delete_metrics_key_indexer_pt2.py
@@ -0,0 +1,27 @@
+# Generated by Django 5.1.4 on 2024-12-26 14:38
+
+from sentry.new_migrations.migrations import CheckedMigration
+from sentry.new_migrations.monkey.models import SafeDeleteModel
+from sentry.new_migrations.monkey.state import DeletionAction
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("sentry", "0803_delete_unused_metricskeyindexer_pt1"),
+ ]
+
+ operations = [SafeDeleteModel(name="MetricsKeyIndexer", deletion_action=DeletionAction.DELETE)]
diff --git a/src/sentry/models/activity.py b/src/sentry/models/activity.py
index 827d244880cf3c..363c6cd7dd7f1a 100644
--- a/src/sentry/models/activity.py
+++ b/src/sentry/models/activity.py
@@ -23,8 +23,9 @@
)
from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey
from sentry.db.models.manager.base import BaseManager
+from sentry.issues.grouptype import get_group_type_by_type_id
from sentry.tasks import activity
-from sentry.types.activity import CHOICES, ActivityType
+from sentry.types.activity import CHOICES, STATUS_CHANGE_ACTIVITY_TYPES, ActivityType
from sentry.types.group import PriorityLevel
if TYPE_CHECKING:
@@ -127,10 +128,10 @@ class Meta:
__repr__ = sane_repr("project_id", "group_id", "event_id", "user_id", "type", "ident")
@staticmethod
- def get_version_ident(version):
+ def get_version_ident(version: str | None) -> str:
return (version or "")[:64]
- def __init__(self, *args, **kwargs):
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
from sentry.models.release import Release
@@ -142,7 +143,7 @@ def __init__(self, *args, **kwargs):
if self.type == ActivityType.ASSIGNED.value:
self.data["assignee"] = str(self.data["assignee"])
- def save(self, *args, **kwargs):
+ def save(self, *args: Any, **kwargs: Any) -> None:
created = bool(not self.id)
super().save(*args, **kwargs)
@@ -177,8 +178,8 @@ def save(self, *args, **kwargs):
sender=Group, instance=self.group, created=True, update_fields=["num_comments"]
)
- def delete(self, *args, **kwargs):
- super().delete(*args, **kwargs)
+ def delete(self, *args: Any, **kwargs: Any) -> tuple[int, dict[str, int]]:
+ result = super().delete(*args, **kwargs)
# HACK: support Group.num_comments
if self.type == ActivityType.NOTE.value and self.group is not None:
@@ -190,7 +191,21 @@ def delete(self, *args, **kwargs):
sender=Group, instance=self.group, created=True, update_fields=["num_comments"]
)
- def send_notification(self):
+ return result
+
+ def send_notification(self) -> None:
+ if self.group:
+ group_type = get_group_type_by_type_id(self.group.type)
+ has_status_change_notifications = group_type.enable_status_change_workflow_notifications
+ is_status_change = self.type in {
+ activity.value for activity in STATUS_CHANGE_ACTIVITY_TYPES
+ }
+
+ # Skip sending the activity notification if the group type does not
+ # support status change workflow notifications
+ if is_status_change and not has_status_change_notifications:
+ return
+
activity.send_activity_notifications.delay(self.id)
diff --git a/src/sentry/models/apiapplication.py b/src/sentry/models/apiapplication.py
index bd44f5d8bf019c..a1912fffbc75e8 100644
--- a/src/sentry/models/apiapplication.py
+++ b/src/sentry/models/apiapplication.py
@@ -111,16 +111,20 @@ def is_active(self):
def is_allowed_response_type(self, value):
return value in ("code", "token")
- def is_valid_redirect_uri(self, value):
+ def normalize_url(self, value):
parts = urlparse(value)
normalized_path = os.path.normpath(parts.path)
- if value.endswith("/"):
+ if normalized_path == ".":
+ normalized_path = "/"
+ elif value.endswith("/") and not normalized_path.endswith("/"):
normalized_path += "/"
- value = urlunparse(parts._replace(path=normalized_path))
+ return urlunparse(parts._replace(path=normalized_path))
+
+ def is_valid_redirect_uri(self, value):
+ value = self.normalize_url(value)
- for ruri in self.redirect_uris.split("\n"):
- if parts.netloc != urlparse(ruri).netloc:
- continue
+ for redirect_uri in self.redirect_uris.split("\n"):
+ ruri = self.normalize_url(redirect_uri)
if value == ruri:
return True
if value.startswith(ruri):
diff --git a/src/sentry/models/apigrant.py b/src/sentry/models/apigrant.py
index f949712c2080f0..d511a611aee4fb 100644
--- a/src/sentry/models/apigrant.py
+++ b/src/sentry/models/apigrant.py
@@ -78,6 +78,11 @@ class Meta:
app_label = "sentry"
db_table = "sentry_apigrant"
+ def __str__(self):
+ return (
+ f"api_grant_id={self.id}, user_id={self.user.id}, application_id={self.application.id}"
+ )
+
def get_scopes(self):
if self.scope_list:
return self.scope_list
diff --git a/src/sentry/models/apikey.py b/src/sentry/models/apikey.py
index 612d0ca606822c..a93d6190f28818 100644
--- a/src/sentry/models/apikey.py
+++ b/src/sentry/models/apikey.py
@@ -56,7 +56,7 @@ def handle_async_replication(self, region_name: str, shard_identifier: int) -> N
)
def __str__(self):
- return str(self.key)
+ return f"api_key_id={self.id}, status={self.status}"
@classmethod
def generate_api_key(cls):
diff --git a/src/sentry/models/apitoken.py b/src/sentry/models/apitoken.py
index e7c80482ac0335..92522b78368a85 100644
--- a/src/sentry/models/apitoken.py
+++ b/src/sentry/models/apitoken.py
@@ -137,7 +137,7 @@ class Meta:
__repr__ = sane_repr("user_id", "token", "application_id")
def __str__(self):
- return force_str(self.token)
+ return f"token_id={force_str(self.id)}"
def _set_plaintext_token(self, token: str) -> None:
"""Set the plaintext token for one-time reading
diff --git a/src/sentry/models/group.py b/src/sentry/models/group.py
index c5243200ec00ab..ac8348e2f827a3 100644
--- a/src/sentry/models/group.py
+++ b/src/sentry/models/group.py
@@ -229,24 +229,33 @@ class EventOrdering(Enum):
]
-def get_oldest_or_latest_event_for_environments(
- ordering: EventOrdering, environments: Sequence[str], group: Group
+def get_oldest_or_latest_event(
+ group: Group,
+ ordering: EventOrdering,
+ conditions: Sequence[Condition] | None = None,
+ start: datetime | None = None,
+ end: datetime | None = None,
) -> GroupEvent | None:
- conditions = []
-
- if len(environments) > 0:
- conditions.append(["environment", "IN", environments])
if group.issue_category == GroupCategory.ERROR:
dataset = Dataset.Events
else:
dataset = Dataset.IssuePlatform
- _filter = eventstore.Filter(
- conditions=conditions, project_ids=[group.project_id], group_ids=[group.id]
- )
- events = eventstore.backend.get_events(
- filter=_filter,
+ all_conditions = [
+ Condition(Column("project_id"), Op.IN, [group.project.id]),
+ Condition(Column("group_id"), Op.IN, [group.id]),
+ ]
+
+ if conditions:
+ all_conditions.extend(conditions)
+
+ events = eventstore.backend.get_events_snql(
+ organization_id=group.project.organization_id,
+ group_id=group.id,
+ start=start,
+ end=end,
+ conditions=all_conditions,
limit=1,
orderby=ordering.value,
referrer="Group.get_latest",
@@ -260,32 +269,32 @@ def get_oldest_or_latest_event_for_environments(
return None
-def get_recommended_event_for_environments(
- environments: Sequence[Environment],
+def get_recommended_event(
group: Group,
conditions: Sequence[Condition] | None = None,
+ start: datetime | None = None,
+ end: datetime | None = None,
) -> GroupEvent | None:
if group.issue_category == GroupCategory.ERROR:
dataset = Dataset.Events
else:
dataset = Dataset.IssuePlatform
- all_conditions = []
- if len(environments) > 0:
- all_conditions.append(
- Condition(Column("environment"), Op.IN, [e.name for e in environments])
- )
- all_conditions.append(Condition(Column("project_id"), Op.IN, [group.project.id]))
- all_conditions.append(Condition(Column("group_id"), Op.IN, [group.id]))
+ all_conditions = [
+ Condition(Column("project_id"), Op.IN, [group.project.id]),
+ Condition(Column("group_id"), Op.IN, [group.id]),
+ ]
if conditions:
all_conditions.extend(conditions)
- end = group.last_seen + timedelta(minutes=1)
- start = end - timedelta(days=7)
+ default_end = group.last_seen + timedelta(minutes=1)
+ default_start = default_end - timedelta(days=7)
expired, _ = outside_retention_with_modified_start(
- start, end, Organization(group.project.organization_id)
+ start=start if start else default_start,
+ end=end if end else default_end,
+ organization=Organization(group.project.organization_id),
)
if expired:
@@ -294,8 +303,8 @@ def get_recommended_event_for_environments(
events = eventstore.backend.get_events_snql(
organization_id=group.project.organization_id,
group_id=group.id,
- start=start,
- end=end,
+ start=start if start else default_start,
+ end=end if end else default_end,
conditions=all_conditions,
limit=1,
orderby=EventOrdering.RECOMMENDED.value,
@@ -764,46 +773,105 @@ def get_share_id(self):
# Otherwise it has not been shared yet.
return None
- def get_latest_event(self) -> GroupEvent | None:
- if not hasattr(self, "_latest_event"):
- self._latest_event = self.get_latest_event_for_environments()
-
- return self._latest_event
+ def get_latest_event(
+ self,
+ conditions: Sequence[Condition] | None = None,
+ start: datetime | None = None,
+ end: datetime | None = None,
+ ) -> GroupEvent | None:
+ """
+ Returns the latest/newest event given the conditions and time range.
+ If no event is found, returns None.
+ """
+ return get_oldest_or_latest_event(
+ group=self,
+ ordering=EventOrdering.LATEST,
+ conditions=conditions,
+ start=start,
+ end=end,
+ )
def get_latest_event_for_environments(
self, environments: Sequence[str] = ()
) -> GroupEvent | None:
- return get_oldest_or_latest_event_for_environments(
- EventOrdering.LATEST,
- environments,
- self,
+ """
+ Legacy special case of `self.get_latest_event` for environments and no date range.
+ Kept for compatability, but it's advised to use `self.get_latest_event` directly.
+ """
+ conditions = (
+ [Condition(Column("environment"), Op.IN, environments)] if len(environments) > 0 else []
+ )
+ return self.get_latest_event(conditions=conditions)
+
+ def get_oldest_event(
+ self,
+ conditions: Sequence[Condition] | None = None,
+ start: datetime | None = None,
+ end: datetime | None = None,
+ ) -> GroupEvent | None:
+ """
+ Returns the oldest event given the conditions and time range.
+ If no event is found, returns None.
+ """
+ return get_oldest_or_latest_event(
+ group=self,
+ ordering=EventOrdering.OLDEST,
+ conditions=conditions,
+ start=start,
+ end=end,
)
def get_oldest_event_for_environments(
self, environments: Sequence[str] = ()
) -> GroupEvent | None:
- return get_oldest_or_latest_event_for_environments(
- EventOrdering.OLDEST,
- environments,
- self,
+ """
+ Legacy special case of `self.get_oldest_event` for environments and no date range.
+ Kept for compatability, but it's advised to use `self.get_oldest_event` directly.
+ """
+ conditions = (
+ [Condition(Column("environment"), Op.IN, environments)] if len(environments) > 0 else []
)
+ return self.get_oldest_event(conditions=conditions)
- def get_recommended_event_for_environments(
+ def get_recommended_event(
self,
- environments: Sequence[Environment] = (),
conditions: Sequence[Condition] | None = None,
+ start: datetime | None = None,
+ end: datetime | None = None,
) -> GroupEvent | None:
- maybe_event = get_recommended_event_for_environments(
- environments,
- self,
- conditions,
+ """
+ Returns a recommended event given the conditions and time range.
+ If a helpful recommendation is not found, it will fallback to the latest event.
+ If neither are found, returns None.
+ """
+ maybe_event = get_recommended_event(
+ group=self,
+ conditions=conditions,
+ start=start,
+ end=end,
)
return (
maybe_event
if maybe_event
- else self.get_latest_event_for_environments([env.name for env in environments])
+ else self.get_latest_event(conditions=conditions, start=start, end=end)
)
+ def get_recommended_event_for_environments(
+ self,
+ environments: Sequence[Environment] = (),
+ conditions: Sequence[Condition] | None = None,
+ ) -> GroupEvent | None:
+ """
+ Legacy special case of `self.get_recommended_event` for environments and no date range.
+ Kept for compatability, but it's advised to use `self.get_recommended_event` directly.
+ """
+ all_conditions: list[Condition] = list(conditions) if conditions else []
+ if len(environments) > 0:
+ all_conditions.append(
+ Condition(Column("environment"), Op.IN, [e.name for e in environments])
+ )
+ return self.get_recommended_event(conditions=all_conditions)
+
def get_suspect_commit(self) -> Commit | None:
from sentry.models.groupowner import GroupOwner, GroupOwnerType
diff --git a/src/sentry/models/groupassignee.py b/src/sentry/models/groupassignee.py
index b4479012ecb2a9..ce6dd33739d4df 100644
--- a/src/sentry/models/groupassignee.py
+++ b/src/sentry/models/groupassignee.py
@@ -134,12 +134,12 @@ def assign(
self,
group: Group,
assigned_to: Team | RpcUser | User,
- acting_user: User | None = None,
+ acting_user: RpcUser | User | None = None,
create_only: bool = False,
extra: dict[str, str] | None = None,
force_autoassign: bool = False,
assignment_source: AssignmentSource | None = None,
- ):
+ ) -> dict[str, bool]:
from sentry.integrations.utils.sync import sync_group_assignee_outbound
from sentry.models.activity import Activity
from sentry.models.groupsubscription import GroupSubscription
@@ -204,9 +204,8 @@ def assign(
def deassign(
self,
group: Group,
+ # XXX: Some callers do not pass an acting user but we should make it mandatory
acting_user: User | RpcUser | None = None,
- assigned_to: Team | RpcUser | None = None,
- extra: dict[str, str] | None = None,
assignment_source: AssignmentSource | None = None,
) -> None:
from sentry.integrations.utils.sync import sync_group_assignee_outbound
@@ -272,7 +271,7 @@ class Meta:
__repr__ = sane_repr("group_id", "user_id", "team_id")
- def save(self, *args, **kwargs):
+ def save(self, *args: Any, **kwargs: Any) -> None:
assert not (self.user_id is not None and self.team_id is not None) and not (
self.user_id is None and self.team_id is None
), "Must have Team or User, not both"
diff --git a/src/sentry/models/grouphistory.py b/src/sentry/models/grouphistory.py
index 96e04922a032ac..06ddb9b3e972da 100644
--- a/src/sentry/models/grouphistory.py
+++ b/src/sentry/models/grouphistory.py
@@ -1,8 +1,12 @@
-from typing import TYPE_CHECKING, ClassVar, Optional, Union
+from __future__ import annotations
+
+import datetime
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, ClassVar
from django.conf import settings
from django.db import models
-from django.db.models import Q
+from django.db.models import Q, QuerySet
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
@@ -149,7 +153,7 @@ class GroupHistoryStatus:
class GroupHistoryManager(BaseManager["GroupHistory"]):
- def filter_to_team(self, team: "Team"):
+ def filter_to_team(self, team: Team) -> QuerySet[GroupHistory]:
from sentry.models.groupassignee import GroupAssignee
from sentry.models.project import Project
@@ -241,13 +245,13 @@ def owner(self, actor: Actor | None) -> None:
self.team_id = actor.id
-def get_prev_history(group, status):
+def get_prev_history(group: Group, status: int) -> GroupHistory | None:
"""
Finds the most recent row that is the inverse of this history row, if one exists.
"""
previous_statuses = PREVIOUS_STATUSES.get(status)
if not previous_statuses:
- return
+ return None
prev_histories = GroupHistory.objects.filter(
group=group, status__in=previous_statuses
@@ -256,11 +260,11 @@ def get_prev_history(group, status):
def record_group_history_from_activity_type(
- group: "Group",
+ group: Group,
activity_type: int,
- actor: Union["User", "Team"] | None = None,
- release: Optional["Release"] = None,
-):
+ actor: RpcUser | User | Team | None = None,
+ release: Release | None = None,
+) -> GroupHistory | None:
"""
Writes a `GroupHistory` row for an activity type if there's a relevant `GroupHistoryStatus` that
maps to it
@@ -275,14 +279,15 @@ def record_group_history_from_activity_type(
if status is not None:
return record_group_history(group, status, actor, release)
+ return None
def record_group_history(
- group: "Group",
+ group: Group,
status: int,
- actor: Union["User", "RpcUser", "Team"] | None = None,
- release: Optional["Release"] = None,
-):
+ actor: User | RpcUser | Team | None = None,
+ release: Release | None = None,
+) -> GroupHistory:
from sentry.models.team import Team
from sentry.users.models.user import User
from sentry.users.services.user import RpcUser
@@ -312,16 +317,16 @@ def record_group_history(
def bulk_record_group_history(
- groups: list["Group"],
+ groups: Sequence[Group],
status: int,
- actor: Union["User", "RpcUser", "Team"] | None = None,
- release: Optional["Release"] = None,
-):
+ actor: User | RpcUser | Team | None = None,
+ release: Release | None = None,
+) -> list[GroupHistory]:
from sentry.models.team import Team
from sentry.users.models.user import User
from sentry.users.services.user import RpcUser
- def get_prev_history_date(group, status):
+ def get_prev_history_date(group: Group, status: int) -> datetime.datetime | None:
prev_history = get_prev_history(group, status)
return prev_history.date_added if prev_history else None
diff --git a/src/sentry/models/groupinbox.py b/src/sentry/models/groupinbox.py
index a4272cb1ad2cdf..efd5332df2e8bf 100644
--- a/src/sentry/models/groupinbox.py
+++ b/src/sentry/models/groupinbox.py
@@ -1,5 +1,10 @@
+from __future__ import annotations
+
import logging
+from collections.abc import Iterable
+from datetime import datetime
from enum import Enum
+from typing import TypedDict
import jsonschema
import sentry_sdk
@@ -135,10 +140,24 @@ def bulk_remove_groups_from_inbox(groups, action=None, user=None, referrer=None)
pass
-def get_inbox_details(group_list):
+class InboxReasonDetails(TypedDict):
+ until: str | None
+ count: int | None
+ window: int | None
+ user_count: int | None
+ user_window: int | None
+
+
+class InboxDetails(TypedDict):
+ reason: int
+ reason_details: InboxReasonDetails | None
+ date_added: datetime
+
+
+def get_inbox_details(group_list: Iterable[Group]) -> dict[int, InboxDetails]:
group_ids = [g.id for g in group_list]
group_inboxes = GroupInbox.objects.filter(group__in=group_ids)
- inbox_stats = {
+ return {
gi.group_id: {
"reason": gi.reason,
"reason_details": gi.reason_details,
@@ -146,5 +165,3 @@ def get_inbox_details(group_list):
}
for gi in group_inboxes
}
-
- return inbox_stats
diff --git a/src/sentry/models/organizationonboardingtask.py b/src/sentry/models/organizationonboardingtask.py
index d21977ffb5eb2b..5d1cefecd94f87 100644
--- a/src/sentry/models/organizationonboardingtask.py
+++ b/src/sentry/models/organizationonboardingtask.py
@@ -179,11 +179,6 @@ class OrganizationOnboardingTask(AbstractOnboardingTask):
OnboardingTask.INVITE_MEMBER,
OnboardingTask.SECOND_PLATFORM,
OnboardingTask.RELEASE_TRACKING,
- # TODO(Telemetry Experience): This task is shown conditionally
- # according to the platform.
- # Check if we can do the same here and mark onboarding as
- # complete if platform does not support sourcemaps
- OnboardingTask.SOURCEMAPS,
OnboardingTask.ALERT_RULE,
OnboardingTask.FIRST_TRANSACTION,
OnboardingTask.SESSION_REPLAY,
@@ -192,6 +187,13 @@ class OrganizationOnboardingTask(AbstractOnboardingTask):
]
)
+ NEW_REQUIRED_ONBOARDING_TASKS_WITH_SOURCE_MAPS = frozenset(
+ [
+ *NEW_REQUIRED_ONBOARDING_TASKS,
+ OnboardingTask.SOURCEMAPS,
+ ]
+ )
+
SKIPPABLE_TASKS = frozenset(
[
OnboardingTask.INVITE_MEMBER,
diff --git a/src/sentry/models/projectteam.py b/src/sentry/models/projectteam.py
index 183ab0c573b145..ad493b2d189ee7 100644
--- a/src/sentry/models/projectteam.py
+++ b/src/sentry/models/projectteam.py
@@ -16,23 +16,12 @@
class ProjectTeamManager(BaseManager["ProjectTeam"]):
def get_for_teams_with_org_cache(self, teams: Sequence["Team"]) -> QuerySet["ProjectTeam"]:
- project_teams = (
+ return (
self.filter(team__in=teams, project__status=ObjectStatus.ACTIVE)
.order_by("project__name", "project__slug")
- .select_related("project")
+ .select_related("project", "project__organization")
)
- # TODO(dcramer): we should query in bulk for ones we're missing here
- orgs = {i.organization_id: i.organization for i in teams}
-
- for project_team in project_teams:
- if project_team.project.organization_id in orgs:
- project_team.project.set_cached_field_value(
- "organization", orgs[project_team.project.organization_id]
- )
-
- return project_teams
-
@region_silo_model
class ProjectTeam(Model):
diff --git a/src/sentry/monitors/endpoints/organization_monitor_index.py b/src/sentry/monitors/endpoints/organization_monitor_index.py
index 585a37cf26d5c1..62aebb05d17611 100644
--- a/src/sentry/monitors/endpoints/organization_monitor_index.py
+++ b/src/sentry/monitors/endpoints/organization_monitor_index.py
@@ -11,6 +11,8 @@
When,
)
from drf_spectacular.utils import extend_schema
+from rest_framework.request import Request
+from rest_framework.response import Response
from sentry import audit_log, quotas
from sentry.api.api_owners import ApiOwner
@@ -65,10 +67,6 @@ def map_value_to_constant(constant, value):
return getattr(constant, value)
-from rest_framework.request import Request
-from rest_framework.response import Response
-
-
def flip_sort_direction(sort_field: str) -> str:
if sort_field[0] == "-":
sort_field = sort_field[1:]
@@ -365,8 +363,11 @@ def put(self, request: Request, organization) -> Response:
result = dict(validator.validated_data)
+ projects = self.get_projects(request, organization, include_all_accessible=True)
+ project_ids = [project.id for project in projects]
+
monitor_guids = result.pop("ids", [])
- monitors = Monitor.objects.filter(guid__in=monitor_guids)
+ monitors = Monitor.objects.filter(guid__in=monitor_guids, project_id__in=project_ids)
status = result.get("status")
# If enabling monitors, ensure we can assign all before moving forward
diff --git a/src/sentry/monitors/endpoints/organization_monitor_index_stats.py b/src/sentry/monitors/endpoints/organization_monitor_index_stats.py
index ba08695ccca103..55e9171049cff5 100644
--- a/src/sentry/monitors/endpoints/organization_monitor_index_stats.py
+++ b/src/sentry/monitors/endpoints/organization_monitor_index_stats.py
@@ -57,6 +57,9 @@ def get(self, request: Request, organization) -> Response:
monitor_guids: list[str] = request.GET.getlist("monitor")
+ projects = self.get_projects(request, organization, include_all_accessible=True)
+ project_ids = [project.id for project in projects]
+
# Pre-fetch the monitor-ids and their guid. This is an
# optimization to eliminate a join against the monitor table which
# significantly inflates the size of the aggregation states.
@@ -68,9 +71,11 @@ def get(self, request: Request, organization) -> Response:
monitor_map = {
id: str(guid)
for id, guid in Monitor.objects.filter(
- organization_id=organization.id, guid__in=monitor_guids
+ organization_id=organization.id, project_id__in=project_ids, guid__in=monitor_guids
).values_list("id", "guid")
}
+ # Filter monitors, keeping only ones that the user has access to.
+ monitor_guids = [guid for guid in monitor_guids if guid in monitor_map.values()]
# We only care about the name but we don't want to join to get it. So we're maintaining
# this map until the very end where we'll map from monitor_environment to environment to
diff --git a/src/sentry/notifications/utils/__init__.py b/src/sentry/notifications/utils/__init__.py
index 370a6d5259305c..c4de152a58b219 100644
--- a/src/sentry/notifications/utils/__init__.py
+++ b/src/sentry/notifications/utils/__init__.py
@@ -28,7 +28,6 @@
from sentry.models.commit import Commit
from sentry.models.deploy import Deploy
from sentry.models.environment import Environment
-from sentry.models.eventerror import EventError
from sentry.models.group import Group
from sentry.models.grouplink import GroupLink
from sentry.models.organization import Organization
@@ -119,24 +118,6 @@ def get_environment_for_deploy(deploy: Deploy | None) -> str:
return "Default Environment"
-def summarize_issues(
- issues: Iterable[Mapping[str, Mapping[str, Any]]]
-) -> Iterable[Mapping[str, str]]:
- rv = []
- for issue in issues:
- extra_info = None
- msg_d = dict(issue["data"])
- msg_d["type"] = issue["type"]
-
- if "image_path" in issue["data"]:
- extra_info = issue["data"]["image_path"].rsplit("/", 1)[-1]
- if "image_arch" in issue["data"]:
- extra_info = "{} ({})".format(extra_info, issue["data"]["image_arch"])
-
- rv.append({"message": EventError(msg_d).message, "extra_info": extra_info})
- return rv
-
-
def get_email_link_extra_params(
referrer: str = "alert_email",
environment: str | None = None,
@@ -275,10 +256,7 @@ def has_integrations(organization: Organization, project: Project) -> bool:
def is_alert_rule_integration(provider: IntegrationProvider) -> bool:
- return any(
- feature == (IntegrationFeatures.ALERT_RULE or IntegrationFeatures.ENTERPRISE_ALERT_RULE)
- for feature in provider.features
- )
+ return IntegrationFeatures.ALERT_RULE in provider.features
def has_alert_integration(project: Project) -> bool:
diff --git a/src/sentry/onboarding_tasks/backends/organization_onboarding_task.py b/src/sentry/onboarding_tasks/backends/organization_onboarding_task.py
index c7510abb4c66c1..5ceac260f92b26 100644
--- a/src/sentry/onboarding_tasks/backends/organization_onboarding_task.py
+++ b/src/sentry/onboarding_tasks/backends/organization_onboarding_task.py
@@ -10,10 +10,12 @@
OnboardingTaskStatus,
OrganizationOnboardingTask,
)
+from sentry.models.project import Project
from sentry.onboarding_tasks.base import OnboardingTaskBackend
from sentry.users.models.user import User
from sentry.users.services.user.model import RpcUser
from sentry.utils import json
+from sentry.utils.platform_categories import SOURCE_MAPS
class OrganizationOnboardingTaskBackend(OnboardingTaskBackend[OrganizationOnboardingTask]):
@@ -47,7 +49,21 @@ def try_mark_onboarding_complete(
organization = Organization.objects.get_from_cache(id=organization_id)
if features.has("organizations:quick-start-updates", organization, actor=user):
- required_tasks = OrganizationOnboardingTask.NEW_REQUIRED_ONBOARDING_TASKS
+
+ projects = Project.objects.filter(organization=organization)
+ project_with_source_maps = next(
+ (p for p in projects if p.platform in SOURCE_MAPS), None
+ )
+
+ # If a project supports source maps, we require them to complete the quick start.
+ # It's possible that the first project doesn't have source maps,
+ # but the second project (which users are guided to create in the "Add Sentry to other parts of the app" step) may have source maps.
+ required_tasks = (
+ OrganizationOnboardingTask.NEW_REQUIRED_ONBOARDING_TASKS_WITH_SOURCE_MAPS
+ if project_with_source_maps
+ else OrganizationOnboardingTask.NEW_REQUIRED_ONBOARDING_TASKS
+ )
+
else:
required_tasks = OrganizationOnboardingTask.REQUIRED_ONBOARDING_TASKS
diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py
index c8cc62186a41ab..e8eeb9bfd10c81 100644
--- a/src/sentry/options/defaults.py
+++ b/src/sentry/options/defaults.py
@@ -907,13 +907,13 @@
register(
"seer.similarity.global-rate-limit",
type=Dict,
- default={"limit": 20, "window": 1},
+ default={"limit": 20, "window": 1}, # window is in seconds
flags=FLAG_ALLOW_EMPTY | FLAG_AUTOMATOR_MODIFIABLE,
)
register(
"seer.similarity.per-project-rate-limit",
type=Dict,
- default={"limit": 5, "window": 1},
+ default={"limit": 5, "window": 1}, # window is in seconds
flags=FLAG_ALLOW_EMPTY | FLAG_AUTOMATOR_MODIFIABLE,
)
@@ -1122,11 +1122,6 @@
# Controls the rollout rate in percent (`0.0` to `1.0`) for metric stats.
register("relay.metric-stats.rollout-rate", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE)
-# Controls the sample rate of metrics summaries computation in Relay.
-register(
- "relay.compute-metrics-summaries.sample-rate", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE
-)
-
# Controls whether generic inbound filters are sent to Relay.
register("relay.emit-generic-inbound-filters", default=False, flags=FLAG_AUTOMATOR_MODIFIABLE)
@@ -1227,6 +1222,12 @@
default=0,
flags=FLAG_PRIORITIZE_DISK | FLAG_AUTOMATOR_MODIFIABLE,
)
+register(
+ "project-abuse-quota.attachment-item-limit",
+ type=Int,
+ default=0,
+ flags=FLAG_PRIORITIZE_DISK | FLAG_AUTOMATOR_MODIFIABLE,
+)
register(
"project-abuse-quota.session-limit",
type=Int,
@@ -2115,6 +2116,20 @@
flags=FLAG_PRIORITIZE_DISK | FLAG_AUTOMATOR_MODIFIABLE,
)
+register(
+ "statistical_detectors.throughput.threshold.transactions",
+ default=50,
+ type=Int,
+ flags=FLAG_AUTOMATOR_MODIFIABLE,
+)
+
+register(
+ "statistical_detectors.throughput.threshold.functions",
+ default=25,
+ type=Int,
+ flags=FLAG_AUTOMATOR_MODIFIABLE,
+)
+
register(
"options_automator_slack_webhook_enabled",
default=True,
@@ -2293,12 +2308,6 @@
flags=FLAG_AUTOMATOR_MODIFIABLE,
)
-register(
- "releases_v2.single-tenant",
- default=False,
- flags=FLAG_AUTOMATOR_MODIFIABLE,
-)
-
# The flag disables the file io on main thread detector
register(
"performance_issues.file_io_main_thread.disabled",
@@ -2504,43 +2513,6 @@
flags=FLAG_AUTOMATOR_MODIFIABLE,
)
-# killswitch for profiling ddm functions metrics.
-# Enable/Disable the ingestion of function metrics
-# in the generic metrics platform
-register(
- "profiling.generic_metrics.functions_ingestion.enabled",
- default=False,
- type=Bool,
- flags=FLAG_AUTOMATOR_MODIFIABLE,
-)
-
-# list of org IDs for which we'll write the function
-# metrics to the generic metrics platform
-register(
- "profiling.generic_metrics.functions_ingestion.allowed_org_ids",
- type=Sequence,
- default=[],
- flags=FLAG_ALLOW_EMPTY | FLAG_AUTOMATOR_MODIFIABLE,
-)
-
-# list of project IDs we want to deny ingesting profiles
-# function metrics into the generic metrics platform
-register(
- "profiling.generic_metrics.functions_ingestion.denied_proj_ids",
- type=Sequence,
- default=[],
- flags=FLAG_ALLOW_EMPTY | FLAG_AUTOMATOR_MODIFIABLE,
-)
-
-# rollout rate: % of profiles for which we ingest the extracted profile
-# functions metrics into the generic metrics platform
-register(
- "profiling.generic_metrics.functions_ingestion.rollout_rate",
- type=Float,
- default=0.0,
- flags=FLAG_AUTOMATOR_MODIFIABLE,
-)
-
# temporary option for logging canonical key fallback stacktraces
register(
"canonical-fallback.send-error-to-sentry",
@@ -2833,12 +2805,6 @@
flags=FLAG_AUTOMATOR_MODIFIABLE,
)
-register(
- "celery_split_queue_legacy_mode",
- default=["post_process_transactions"],
- flags=FLAG_AUTOMATOR_MODIFIABLE,
-)
-
register(
"celery_split_queue_rollout",
default={"post_process_transactions": 1.0},
@@ -2917,15 +2883,6 @@
flags=FLAG_AUTOMATOR_MODIFIABLE,
)
-# list of project IDs for which we'll apply
-# stack trace rules to the profiles in case
-# there are any rules defined
-register(
- "profiling.stack_trace_rules.allowed_project_ids",
- type=Sequence,
- default=[],
- flags=FLAG_ALLOW_EMPTY | FLAG_AUTOMATOR_MODIFIABLE,
-)
register(
"performance.event-tracker.sample-rate.transactions",
default=0.0,
diff --git a/src/sentry/plugins/base/configuration.py b/src/sentry/plugins/base/configuration.py
deleted file mode 100644
index d41eacbd4777c4..00000000000000
--- a/src/sentry/plugins/base/configuration.py
+++ /dev/null
@@ -1,136 +0,0 @@
-from django.contrib import messages
-from django.http import Http404, HttpResponseRedirect
-from django.urls import reverse
-from django.utils.safestring import mark_safe
-from django.utils.translation import gettext as _
-
-from sentry import options
-from sentry.api import client
-from sentry.api.serializers import serialize
-from sentry.models.options.project_option import ProjectOption
-from sentry.utils import json
-from sentry.web.helpers import render_to_string
-
-
-def react_plugin_config(plugin, project, request):
- response = client.get(
- f"/projects/{project.organization.slug}/{project.slug}/plugins/{plugin.slug}/",
- request=request,
- )
- nonce = ""
- if hasattr(request, "csp_nonce"):
- nonce = f' nonce="{request.csp_nonce}"'
-
- # Pretty sure this is not in use, and if it is, it has been broken since
- # https://github.com/getsentry/sentry/pull/13578/files#diff-d17d91cc629f5f2e4582adb6e52d426f654452b751da97bafa25160b78566438L206
- return mark_safe(
- """
-
-
- """
- % (
- nonce,
- json.dumps_htmlsafe(serialize(project, request.user)),
- json.dumps_htmlsafe(serialize(project.organization, request.user)),
- json.dumps_htmlsafe(response.data),
- )
- )
-
-
-def default_plugin_config(plugin, project, request):
- if plugin.can_enable_for_projects() and not plugin.can_configure_for_project(project):
- raise Http404()
-
- plugin_key = plugin.get_conf_key()
- form_class = plugin.get_conf_form(project)
- template = plugin.get_conf_template(project)
-
- if form_class is None:
- return HttpResponseRedirect(
- reverse("sentry-manage-project", args=[project.organization.slug, project.slug])
- )
-
- test_results = None
-
- form = form_class(
- request.POST if request.POST.get("plugin") == plugin.slug else None,
- initial=plugin.get_conf_options(project),
- prefix=plugin_key,
- )
- if form.is_valid():
- if "action_test" in request.POST and plugin.is_testable():
- test_results = plugin.test_configuration_and_get_test_results(project)
- else:
- for field, value in form.cleaned_data.items():
- key = f"{plugin_key}:{field}"
- if project:
- ProjectOption.objects.set_value(project, key, value)
- else:
- options.set(key, value, channel=options.UpdateChannel.APPLICATION)
-
- messages.add_message(
- request, messages.SUCCESS, _("Your settings were saved successfully.")
- )
- return HttpResponseRedirect(request.path)
-
- # TODO(mattrobenolt): Reliably determine if a plugin is configured
- # if hasattr(plugin, 'is_configured'):
- # is_configured = plugin.is_configured(project)
- # else:
- # is_configured = True
- is_configured = True
-
- return mark_safe(
- render_to_string(
- template=template,
- context={
- "form": form,
- "plugin": plugin,
- "plugin_description": plugin.get_description() or "",
- "plugin_test_results": test_results,
- "plugin_is_configured": is_configured,
- },
- request=request,
- )
- )
-
-
-def default_issue_plugin_config(plugin, project, form_data):
- plugin_key = plugin.get_conf_key()
- for field, value in form_data.items():
- key = f"{plugin_key}:{field}"
- if project:
- ProjectOption.objects.set_value(project, key, value)
- else:
- options.set(key, value, channel=options.UpdateChannel.APPLICATION)
-
-
-def default_plugin_options(plugin, project):
- form_class = plugin.get_conf_form(project)
- if form_class is None:
- return {}
-
- NOTSET = object()
- plugin_key = plugin.get_conf_key()
- initials = plugin.get_form_initial(project)
- for field in form_class.base_fields:
- key = f"{plugin_key}:{field}"
- if project is not None:
- value = ProjectOption.objects.get_value(project, key, NOTSET)
- else:
- value = options.get(key)
- if value is not NOTSET:
- initials[field] = value
- return initials
diff --git a/src/sentry/plugins/base/v1.py b/src/sentry/plugins/base/v1.py
index 8b186f2b5a7bc3..86f0167c17d27e 100644
--- a/src/sentry/plugins/base/v1.py
+++ b/src/sentry/plugins/base/v1.py
@@ -1,7 +1,5 @@
from __future__ import annotations
-__all__ = ("Plugin",)
-
import logging
from collections.abc import Sequence
from threading import local
@@ -13,17 +11,17 @@
from sentry.auth import access
from sentry.models.project import Project
from sentry.plugins import HIDDEN_PLUGINS
-from sentry.plugins.base.configuration import default_plugin_config, default_plugin_options
from sentry.plugins.base.response import DeferredResponse
from sentry.plugins.base.view import PluggableViewMixin
from sentry.plugins.config import PluginConfigMixin
from sentry.plugins.status import PluginStatusMixin
from sentry.projects.services.project import RpcProject
-from sentry.utils.hashlib import md5_text
if TYPE_CHECKING:
from django.utils.functional import _StrPromise
+__all__ = ("Plugin",)
+
class PluginMount(type):
def __new__(cls, name, bases, attrs):
@@ -206,25 +204,6 @@ def get_conf_template(self, project=None):
return self.project_conf_template
return self.site_conf_template
- def get_conf_options(self, project=None):
- """
- Returns a dict of all of the configured options for a project.
-
- >>> plugin.get_conf_options(project)
- """
- return default_plugin_options(self, project)
-
- def get_conf_version(self, project):
- """
- Returns a version string that represents the current configuration state.
-
- If any option changes or new options added, the version will change.
-
- >>> plugin.get_conf_version(project)
- """
- options = self.get_conf_options(project)
- return md5_text("&".join(sorted("%s=%s" % o for o in options.items()))).hexdigest()[:3]
-
def get_conf_title(self):
"""
Returns a string representing the title to be shown on the configuration page.
@@ -498,10 +477,6 @@ def is_hidden(self):
"""
return self.slug in HIDDEN_PLUGINS
- def configure(self, request, project=None):
- """Configures the plugin."""
- return default_plugin_config(self, project, request)
-
def get_url_module(self):
"""Allows a plugin to return the import path to a URL module."""
diff --git a/src/sentry/plugins/base/v2.py b/src/sentry/plugins/base/v2.py
index f1c0d9fdf3e16b..75909115896414 100644
--- a/src/sentry/plugins/base/v2.py
+++ b/src/sentry/plugins/base/v2.py
@@ -8,12 +8,10 @@
from django.http import HttpResponseRedirect
from sentry.plugins import HIDDEN_PLUGINS
-from sentry.plugins.base.configuration import default_plugin_config, default_plugin_options
from sentry.plugins.base.response import DeferredResponse
from sentry.plugins.config import PluginConfigMixin
from sentry.plugins.interfaces.releasehook import ReleaseHook
from sentry.plugins.status import PluginStatusMixin
-from sentry.utils.hashlib import md5_text
if TYPE_CHECKING:
from django.utils.functional import _StrPromise
@@ -197,25 +195,6 @@ def get_conf_template(self, project=None):
return self.project_conf_template
return self.site_conf_template
- def get_conf_options(self, project=None):
- """
- Returns a dict of all of the configured options for a project.
-
- >>> plugin.get_conf_options(project)
- """
- return default_plugin_options(self, project)
-
- def get_conf_version(self, project):
- """
- Returns a version string that represents the current configuration state.
-
- If any option changes or new options added, the version will change.
-
- >>> plugin.get_conf_version(project)
- """
- options = self.get_conf_options(project)
- return md5_text("&".join(sorted("%s=%s" % o for o in options.items()))).hexdigest()[:3]
-
def get_conf_title(self):
"""
Returns a string representing the title to be shown on the configuration page.
@@ -438,10 +417,6 @@ def get_custom_contexts(self):
return [MyContextType]
"""
- def configure(self, project, request):
- """Configures the plugin."""
- return default_plugin_config(self, project, request)
-
def get_url_module(self):
"""Allows a plugin to return the import path to a URL module."""
diff --git a/src/sentry/plugins/bases/data_forwarding.py b/src/sentry/plugins/bases/data_forwarding.py
index 44e0e1cf53606a..1a970044a417e0 100644
--- a/src/sentry/plugins/bases/data_forwarding.py
+++ b/src/sentry/plugins/bases/data_forwarding.py
@@ -6,16 +6,12 @@
from sentry.api.serializers import serialize
from sentry.eventstore.models import Event
from sentry.plugins.base import Plugin
-from sentry.plugins.base.configuration import react_plugin_config
from sentry.tsdb.base import TSDBModel
logger = logging.getLogger(__name__)
class DataForwardingPlugin(Plugin):
- def configure(self, project, request):
- return react_plugin_config(self, project, request)
-
def has_project_conf(self):
return True
diff --git a/src/sentry/plugins/bases/issue2.py b/src/sentry/plugins/bases/issue2.py
index 689808affdee65..d52e3b7b6613ea 100644
--- a/src/sentry/plugins/bases/issue2.py
+++ b/src/sentry/plugins/bases/issue2.py
@@ -13,7 +13,6 @@
from sentry.exceptions import PluginError # NOQA
from sentry.models.activity import Activity
from sentry.models.groupmeta import GroupMeta
-from sentry.plugins.base.configuration import react_plugin_config
from sentry.plugins.base.v1 import Plugin
from sentry.plugins.endpoints import PluginGroupEndpoint
from sentry.signals import issue_tracker_used
@@ -51,9 +50,6 @@ class IssueTrackingPlugin2(Plugin):
issue_fields: frozenset[str] | None = None
# issue_fields = frozenset(['id', 'title', 'url'])
- def configure(self, project, request):
- return react_plugin_config(self, project, request)
-
def get_plugin_type(self):
return "issue-tracking"
diff --git a/src/sentry/plugins/bases/notify.py b/src/sentry/plugins/bases/notify.py
index 420aa39e15922b..7016bb830a93e0 100644
--- a/src/sentry/plugins/bases/notify.py
+++ b/src/sentry/plugins/bases/notify.py
@@ -11,7 +11,6 @@
from sentry.notifications.services.service import notifications_service
from sentry.notifications.types import NotificationSettingEnum
from sentry.plugins.base import Plugin
-from sentry.plugins.base.configuration import react_plugin_config
from sentry.plugins.base.structs import Notification
from sentry.shared_integrations.exceptions import ApiError
from sentry.types.actor import Actor, ActorType
@@ -46,9 +45,6 @@ class NotificationPlugin(Plugin):
# site_conf_form = NotificationConfigurationForm
project_conf_form: type[forms.Form] = NotificationConfigurationForm
- def configure(self, project, request):
- return react_plugin_config(self, project, request)
-
def get_plugin_type(self):
return "notification"
diff --git a/src/sentry/processing/backpressure/memory.py b/src/sentry/processing/backpressure/memory.py
index 3a336377043ce2..7b2af742fe0597 100644
--- a/src/sentry/processing/backpressure/memory.py
+++ b/src/sentry/processing/backpressure/memory.py
@@ -13,6 +13,8 @@ class ServiceMemory:
used: int
available: int
percentage: float
+ host: str | None = None
+ port: int | None = None
def __init__(self, name: str, used: int, available: int):
self.name = name
@@ -21,6 +23,12 @@ def __init__(self, name: str, used: int, available: int):
self.percentage = used / available
+@dataclass
+class NodeInfo:
+ host: str | None
+ port: int | None
+
+
def query_rabbitmq_memory_usage(host: str) -> ServiceMemory:
"""Returns the currently used memory and the memory limit of a
RabbitMQ host.
@@ -51,6 +59,23 @@ def get_memory_usage(node_id: str, info: Mapping[str, Any]) -> ServiceMemory:
return ServiceMemory(node_id, memory_used, memory_available)
+def get_host_port_info(node_id: str, cluster: Cluster) -> NodeInfo:
+ """
+ Extract the host and port of the redis node in the cluster.
+ """
+ try:
+ if isinstance(cluster, RedisCluster):
+ # RedisCluster node mapping
+ node = cluster.connection_pool.nodes.nodes.get(node_id)
+ return NodeInfo(node["host"], node["port"])
+ else:
+ # rb.Cluster node mapping
+ node = cluster.hosts[node_id]
+ return NodeInfo(node.host, node.port)
+ except Exception:
+ return NodeInfo(None, None)
+
+
def iter_cluster_memory_usage(cluster: Cluster) -> Generator[ServiceMemory, None, None]:
"""
A generator that yields redis `INFO` results for each of the nodes in the `cluster`.
@@ -65,4 +90,8 @@ def iter_cluster_memory_usage(cluster: Cluster) -> Generator[ServiceMemory, None
cluster_info = promise.value
for node_id, info in cluster_info.items():
- yield get_memory_usage(node_id, info)
+ node_info = get_host_port_info(node_id, cluster)
+ memory_usage = get_memory_usage(node_id, info)
+ memory_usage.host = node_info.host
+ memory_usage.port = node_info.port
+ yield memory_usage
diff --git a/src/sentry/processing/backpressure/monitor.py b/src/sentry/processing/backpressure/monitor.py
index f9c233d6dd409a..bcd856c6b00d0d 100644
--- a/src/sentry/processing/backpressure/monitor.py
+++ b/src/sentry/processing/backpressure/monitor.py
@@ -85,10 +85,12 @@ def check_service_health(services: Mapping[str, Service]) -> MutableMapping[str,
reasons = []
logger.info("Checking service `%s` (configured high watermark: %s):", name, high_watermark)
+ memory = None
try:
for memory in check_service_memory(service):
if memory.percentage >= high_watermark:
reasons.append(memory)
+ logger.info("Checking node: %s:%s", memory.host, memory.port)
logger.info(
" name: %s, used: %s, available: %s, percentage: %s",
memory.name,
@@ -101,6 +103,14 @@ def check_service_health(services: Mapping[str, Service]) -> MutableMapping[str,
scope.set_tag("service", name)
sentry_sdk.capture_exception(e)
unhealthy_services[name] = e
+ host = memory.host if memory else "unknown"
+ port = memory.port if memory else "unknown"
+ logger.exception(
+ "Error while processing node %s:%s for service %s",
+ host,
+ port,
+ service,
+ )
else:
unhealthy_services[name] = reasons
diff --git a/src/sentry/profiles/device.py b/src/sentry/profiles/device.py
deleted file mode 100644
index b7f4defa94500d..00000000000000
--- a/src/sentry/profiles/device.py
+++ /dev/null
@@ -1,538 +0,0 @@
-from enum import Enum
-
-GIB = 1024 * 1024 * 1024
-UNKNOWN_DEVICE = "Unknown Device"
-
-
-class DeviceClass(Enum):
- UNCLASSIFIED = 0
- LOW_END = 1
- MID_END = 2
- HIGH_END = 3
-
- def __str__(self) -> str:
- return {0: "unclassified", 1: "low", 2: "mid", 3: "high"}[self.value]
-
-
-class Platform(Enum):
- UNKNOWN = 0
- IOS_DEVICE = 1
- IOS_SIMULATOR = 2
- ANDROID_DEVICE = 3
- ANDROID_EMULATOR = 4
-
-
-# classify_device classifies a device as being low, mid, or high end
-def classify_device(
- model: str,
- os_name: str,
- is_emulator: bool,
- cpu_frequencies: tuple[int] | None = None,
- physical_memory_bytes: int | None = None,
-) -> DeviceClass:
- platform = get_platform(os_name, is_emulator)
- if platform in (Platform.IOS_SIMULATOR, Platform.ANDROID_EMULATOR):
- """
- We exclude simulators/emulators from performance statistics for
- low/mid/high end because these run on arbitrary PC hardware and
- will make our data noisy.
- """
- return DeviceClass.UNCLASSIFIED
-
- if platform == Platform.IOS_DEVICE:
- frequencies = ios_cpu_core_max_frequencies_mhz(model)
- if core_frequency(frequencies) < 2000:
- return DeviceClass.LOW_END # less than 2GHz clock speed
- if core_frequency(frequencies) < 3000:
- return DeviceClass.MID_END # less than 3Ghz clock speed
- return DeviceClass.HIGH_END
-
- if platform == Platform.ANDROID_DEVICE and cpu_frequencies and physical_memory_bytes:
- if number_of_cores(cpu_frequencies) < 8 or physical_memory_bytes < (4 * GIB):
- return DeviceClass.LOW_END # less than 8 cores or less than 4GiB of RAM
- if core_frequency(cpu_frequencies) < 2500:
- return DeviceClass.MID_END # less than 2.5GHz clock speed
- return DeviceClass.HIGH_END
-
- return DeviceClass.UNCLASSIFIED
-
-
-def number_of_cores(frequencies: tuple[int, ...] | None) -> int:
- return len(frequencies) if frequencies is not None else 0
-
-
-def core_frequency(frequencies: tuple[int, ...] | None) -> int:
- return max(frequencies) if frequencies is not None else 0
-
-
-def get_platform(device_os_name: str, is_emulator: bool) -> Platform:
- if device_os_name == "android":
- if is_emulator:
- return Platform.ANDROID_EMULATOR
- return Platform.ANDROID_DEVICE
- if device_os_name in ("iPhone OS", "iOS", "iPadOS", "watchOS", "tvOS"):
- if is_emulator:
- return Platform.IOS_SIMULATOR
- return Platform.IOS_DEVICE
- return Platform.UNKNOWN
-
-
-IPHONE4 = "iPhone 4"
-IPHONE5 = "iPhone 5"
-IPHONE5C = "iPhone 5c"
-IPHONE5S = "iPhone 5s"
-IPHONE7 = "iPhone 7"
-IPHONE7PLUS = "iPhone 7 Plus"
-IPHONE8 = "iPhone 8"
-IPHONE8PLUS = "iPhone 8 Plus"
-IPHONEX = "iPhone X"
-IPHONEXSMAX = "iPhone XS Max"
-
-IPAD2 = "iPad 2"
-IPADGEN3 = "iPad (3rd gen)"
-IPADGEN4 = "iPad (4th gen)"
-IPADGEN5 = "iPad (5th gen)"
-IPADGEN6 = "iPad (6th gen)"
-IPADGEN7 = "iPad (7th gen)"
-IPADGEN8 = "iPad (8th gen)"
-IPADGEN9 = "iPad (9th gen)"
-IPADGEN10 = "iPad (10th gen)"
-
-IPADAIRGEN1 = "iPad Air (1st gen)"
-IPADAIR2 = "iPad Air 2"
-IPADAIRGEN3 = "iPad Air (3rd gen)"
-IPADAIRGEN4 = "iPad Air (4th gen)"
-IPADAIRGEN5 = "iPad Air (5th gen)"
-
-IPADPRO12GEN1 = "iPad Pro (12.9-inch, 1st gen)"
-IPADPRO9GEN1 = "iPad Pro (9.7-inch, 1st gen)"
-IPADPRO12GEN2 = "iPad Pro (12.9-inch, 2nd gen)"
-IPADPRO10 = "iPad Pro (10.5-inch)"
-IPADPRO11GEN1 = "iPad Pro (11-inch, 1st gen)"
-IPADPRO12GEN3 = "iPad Pro (12.9-inch, 3rd gen)"
-IPADPRO11GEN2 = "iPad Pro (11-inch, 2nd gen)"
-IPADPRO12GEN4 = "iPad Pro (12.9-inch, 4th gen)"
-IPADPRO11GEN3 = "iPad Pro (11-inch, 3rd gen)"
-IPADPRO11GEN4 = "iPad Pro (11-inch, 4th gen)"
-IPADPRO12GEN5 = "iPad Pro (12.9-inch, 5th gen)"
-IPADPRO12GEN6 = "iPad Pro (12.9-inch 6th gen)"
-
-IPADMINIGEN1 = "iPad mini (1st gen)"
-IPADMINI2 = "iPad mini 2"
-IPADMINI3 = "iPad mini 3"
-IPADMINI4 = "iPad mini 4"
-IPADMINIGEN5 = "iPad mini (5th gen)"
-IPADMINIGEN6 = "iPad mini (6th gen)"
-
-APPLEWATCHGEN1 = "Apple Watch (1st gen)"
-APPLEWATCHSERIES1 = "Apple Watch Series 1"
-APPLEWATCHSERIES2 = "Apple Watch Series 2"
-APPLEWATCHSERIES3 = "Apple Watch Series 3"
-APPLEWATCHSERIES4 = "Apple Watch Series 4"
-APPLEWATCHSERIES5 = "Apple Watch Series 5"
-APPLEWATCHSE = "Apple Watch SE"
-APPLEWATCHSERIES6 = "Apple Watch Series 6"
-
-APPLETVGEN1 = "Apple TV (1st gen)"
-APPLETVGEN2 = "Apple TV (2nd gen)"
-APPLETVGEN3 = "Apple TV (3rd gen)"
-
-# https:#www.theiphonewiki.com/wiki/Models
-IOS_MODELS: dict[str, str] = {
- "iPhone1,1": "iPhone (1st gen)",
- "iPhone1,2": "iPhone 3G",
- "iPhone2,1": "iPhone 3GS",
- "iPhone3,1": IPHONE4,
- "iPhone3,2": IPHONE4,
- "iPhone3,3": IPHONE4,
- "iPhone4,1": "iPhone 4S",
- "iPhone5,1": IPHONE5,
- "iPhone5,2": IPHONE5,
- "iPhone5,3": IPHONE5C,
- "iPhone5,4": IPHONE5C,
- "iPhone6,1": IPHONE5S,
- "iPhone6,2": IPHONE5S,
- "iPhone7,2": "iPhone 6",
- "iPhone7,1": "iPhone 6 Plus",
- "iPhone8,1": "iPhone 6s",
- "iPhone8,2": "iPhone 6s Plus",
- "iPhone8,4": "iPhone SE (1st gen)",
- "iPhone9,1": IPHONE7,
- "iPhone9,3": IPHONE7,
- "iPhone9,2": IPHONE7PLUS,
- "iPhone9,4": IPHONE7PLUS,
- "iPhone10,1": IPHONE8,
- "iPhone10,4": IPHONE8,
- "iPhone10,2": IPHONE8PLUS,
- "iPhone10,5": IPHONE8PLUS,
- "iPhone10,3": IPHONEX,
- "iPhone10,6": IPHONEX,
- "iPhone11,8": "iPhone XR",
- "iPhone11,2": "iPhone XS",
- "iPhone11,4": IPHONEXSMAX,
- "iPhone11,6": IPHONEXSMAX,
- "iPhone12,1": "iPhone 11",
- "iPhone12,3": "iPhone 11 Pro",
- "iPhone12,5": "iPhone 11 Pro Max",
- "iPhone12,8": "iPhone SE (2nd gen)",
- "iPhone13,1": "iPhone 12 mini",
- "iPhone13,2": "iPhone 12",
- "iPhone13,3": "iPhone 12 Pro",
- "iPhone13,4": "iPhone 12 Pro Max",
- "iPhone14,4": "iPhone 13 mini",
- "iPhone14,5": "iPhone 13",
- "iPhone14,2": "iPhone 13 Pro",
- "iPhone14,3": "iPhone 13 Pro Max",
- "iPhone14,6": "iPhone SE (3rd gen)",
- "iPhone14,7": "iPhone 14",
- "iPhone14,8": "iPhone 14 Plus",
- "iPhone15,2": "iPhone 14 Pro",
- "iPhone15,3": "iPhone 14 Pro Max",
- "iPhone15,4": "iPhone 15",
- "iPhone15,5": "iPhone 15 Plus",
- "iPhone16,1": "iPhone 15 Pro",
- "iPhone16,2": "iPhone 15 Pro Max",
- "iPod1,1": "iPod touch (1st gen)",
- "iPod2,1": "iPod touch (2nd gen)",
- "iPod3,1": "iPod touch (3rd gen)",
- "iPod4,1": "iPod touch (4th gen)",
- "iPod5,1": "iPod touch (5th gen)",
- "iPod7,1": "iPod touch (6th gen)",
- "iPod9,1": "iPod touch (7th gen)",
- "iPad1,1": "iPad (1st gen)",
- "iPad2,1": IPAD2,
- "iPad2,2": IPAD2,
- "iPad2,3": IPAD2,
- "iPad2,4": IPAD2,
- "iPad3,1": IPADGEN3,
- "iPad3,2": IPADGEN3,
- "iPad3,3": IPADGEN3,
- "iPad3,4": IPADGEN4,
- "iPad3,5": IPADGEN4,
- "iPad3,6": IPADGEN4,
- "iPad6,11": IPADGEN5,
- "iPad6,12": IPADGEN5,
- "iPad7,5": IPADGEN6,
- "iPad7,6": IPADGEN6,
- "iPad7,11": IPADGEN7,
- "iPad7,12": IPADGEN7,
- "iPad11,6": IPADGEN8,
- "iPad11,7": IPADGEN8,
- "iPad12,1": IPADGEN9,
- "iPad12,2": IPADGEN9,
- "iPad4,1": IPADAIRGEN1,
- "iPad4,2": IPADAIRGEN1,
- "iPad4,3": IPADAIRGEN1,
- "iPad5,3": IPADAIR2,
- "iPad5,4": IPADAIR2,
- "iPad11,3": IPADAIRGEN3,
- "iPad11,4": IPADAIRGEN3,
- "iPad13,1": IPADAIRGEN4,
- "iPad13,2": IPADAIRGEN4,
- "iPad13,16": IPADAIRGEN5,
- "iPad13,17": IPADAIRGEN5,
- "iPad6,7": IPADPRO12GEN1,
- "iPad6,8": IPADPRO12GEN1,
- "iPad6,3": IPADPRO9GEN1,
- "iPad6,4": IPADPRO9GEN1,
- "iPad7,1": IPADPRO12GEN2,
- "iPad7,2": IPADPRO12GEN2,
- "iPad7,3": IPADPRO10,
- "iPad7,4": IPADPRO10,
- "iPad8,1": IPADPRO11GEN1,
- "iPad8,2": IPADPRO11GEN1,
- "iPad8,3": IPADPRO11GEN1,
- "iPad8,4": IPADPRO11GEN1,
- "iPad8,5": IPADPRO12GEN3,
- "iPad8,6": IPADPRO12GEN3,
- "iPad8,7": IPADPRO12GEN3,
- "iPad8,8": IPADPRO12GEN3,
- "iPad8,9": IPADPRO11GEN2,
- "iPad8,10": IPADPRO11GEN2,
- "iPad8,11": IPADPRO12GEN4,
- "iPad8,12": IPADPRO12GEN4,
- "iPad13,4": IPADPRO11GEN3,
- "iPad13,5": IPADPRO11GEN3,
- "iPad13,6": IPADPRO11GEN3,
- "iPad13,7": IPADPRO11GEN3,
- "iPad13,8": IPADPRO12GEN5,
- "iPad13,9": IPADPRO12GEN5,
- "iPad13,10": IPADPRO12GEN5,
- "iPad13,11": IPADPRO12GEN5,
- "iPad2,5": IPADMINIGEN1,
- "iPad2,6": IPADMINIGEN1,
- "iPad2,7": IPADMINIGEN1,
- "iPad4,4": IPADMINI2,
- "iPad4,5": IPADMINI2,
- "iPad4,6": IPADMINI2,
- "iPad4,7": IPADMINI3,
- "iPad4,8": IPADMINI3,
- "iPad4,9": IPADMINI3,
- "iPad5,1": IPADMINI4,
- "iPad5,2": IPADMINI4,
- "iPad11,1": IPADMINIGEN5,
- "iPad11,2": IPADMINIGEN5,
- "iPad13,18": IPADGEN10,
- "iPad13,19": IPADGEN10,
- "iPad14,1": IPADMINIGEN6,
- "iPad14,2": IPADMINIGEN6,
- "iPad14,3": IPADPRO11GEN4,
- "iPad14,4": IPADPRO11GEN4,
- "iPad14,5": IPADPRO12GEN6,
- "iPad14,6": IPADPRO12GEN6,
- "Watch1,1": APPLEWATCHGEN1,
- "Watch1,2": APPLEWATCHGEN1,
- "Watch2,6": APPLEWATCHSERIES1,
- "Watch2,7": APPLEWATCHSERIES1,
- "Watch2,3": APPLEWATCHSERIES2,
- "Watch2,4": APPLEWATCHSERIES2,
- "Watch3,1": APPLEWATCHSERIES3,
- "Watch3,2": APPLEWATCHSERIES3,
- "Watch3,3": APPLEWATCHSERIES3,
- "Watch3,4": APPLEWATCHSERIES3,
- "Watch4,1": APPLEWATCHSERIES4,
- "Watch4,2": APPLEWATCHSERIES4,
- "Watch4,3": APPLEWATCHSERIES4,
- "Watch4,4": APPLEWATCHSERIES4,
- "Watch5,1": APPLEWATCHSERIES5,
- "Watch5,2": APPLEWATCHSERIES5,
- "Watch5,3": APPLEWATCHSERIES5,
- "Watch5,4": APPLEWATCHSERIES5,
- "Watch5,9": APPLEWATCHSE,
- "Watch5,10": APPLEWATCHSE,
- "Watch5,11": APPLEWATCHSE,
- "Watch5,12": APPLEWATCHSE,
- "Watch6,3": APPLEWATCHSERIES6,
- "Watch6,4": APPLEWATCHSERIES6,
- "AppleTV1,1": "Apple TV (1st gen)",
- "AppleTV2,1": "Apple TV (2nd gen)",
- "AppleTV3,1": APPLETVGEN3,
- "AppleTV3,2": APPLETVGEN3,
- "AppleTV5,3": "Apple TV (4th gen)",
- "AppleTV6,2": "Apple TV 4K",
- "AppleTV11,1": "Apple TV 4K (2nd gen)",
- "i386": "iOS Simulator (i386)",
- "x86_64": "iOS Simulator (x86_64)",
-}
-
-
-def ios_human_readable_model_name(model: str) -> str:
- if model in IOS_MODELS:
- return IOS_MODELS[model]
- if model.startswith("iPhone"):
- return "Unknown iPhone"
- if model.startswith("iPad"):
- return "Unknown iPad"
- if model.startswith("AppleTV"):
- return "Unknown Apple TV"
- if model.startswith("Watch"):
- return "Unknown Apple Watch"
- return "Unknown iOS Device"
-
-
-CPU1 = (520, 520)
-CPU2 = (1000, 1000)
-CPU3 = (1300, 1300)
-CPU4 = (1400, 1400)
-CPU5 = (1500, 1500)
-CPU6 = (1800, 1800)
-CPU7 = (1850, 1850)
-CPU8 = (2160, 2160)
-CPU9 = (2260, 2260)
-CPU10 = (2320, 2320)
-CPU11 = (2340, 2340)
-CPU12 = (1500, 1500, 1500)
-CPU13 = (2380, 2380, 2380, 1300, 1300, 1300)
-CPU14 = (2390, 2390, 1420, 1420, 1420, 1420)
-CPU15 = (2490, 2490, 1587, 1587, 1587, 1587)
-CPU16 = (2650, 2650, 1600, 1600, 1600, 1600)
-CPU17 = (2490, 2490, 2490, 2490, 1587, 1587, 1587, 1587)
-CPU18 = (3100, 3100, 1800, 1800, 1800, 1800)
-CPU19 = (3230, 3230, 1800, 1800, 1800, 1800)
-CPU20 = (2900, 2900, 1800, 1800, 1800, 1800)
-CPU21 = (3200, 3200, 3200, 3200, 2060, 2060, 2060, 2060)
-CPU22 = (3230, 3230, 2020, 2020, 2020, 2020)
-CPU23 = (3460, 3460, 2020, 2020, 2020, 2020)
-
-
-IOS_CPU_FREQUENCIES: dict[str, tuple[int, ...]] = {
- "iPhone1,1": (412,),
- "iPhone1,2": (412,),
- "iPod1,1": (412,),
- "Watch1,1": (520,),
- "Watch1,2": (520,),
- "iPod1,2": (533,),
- "iPhone2,1": (600,),
- "iPod3,1": (600,),
- "iPhone3,1": (800,),
- "iPhone3,2": (800,),
- "iPhone3,3": (800,),
- "iPod4,1": (800,),
- "iPhone4,1": (800,),
- "iPad1,1": (1000,),
- "AppleTV1,1": (1000,),
- "AppleTV2,1": (1000,),
- "AppleTV3,1": (1000,),
- "AppleTV3,2": (1000,),
- "Watch2,6": CPU1,
- "Watch2,7": CPU1,
- "Watch2,3": CPU1,
- "Watch2,4": CPU1,
- # The clock speeds for the Watch3,4,5 have not been published, we only
- # know that they are dual core 64-bit chips. Here we will assume that
- # they use the confirmed clock frequency from the Watch2, but in reality
- # they are likely higher.
- "Watch3,1": CPU1,
- "Watch3,2": CPU1,
- "Watch3,3": CPU1,
- "Watch3,4": CPU1,
- "Watch4,1": CPU1,
- "Watch4,2": CPU1,
- "Watch4,3": CPU1,
- "Watch4,4": CPU1,
- "Watch5,1": CPU1,
- "Watch5,2": CPU1,
- "Watch5,3": CPU1,
- "Watch5,4": CPU1,
- "Watch5,9": CPU2,
- "Watch5,10": CPU2,
- "Watch5,11": CPU2,
- "Watch5,12": CPU2,
- "Watch6,3": CPU2,
- "Watch6,4": CPU2,
- "iPod5,1": (800, 800),
- "iPad2,1": CPU2,
- "iPad2,2": CPU2,
- "iPad2,3": CPU2,
- "iPad2,4": CPU2,
- "iPad2,5": CPU2,
- "iPad2,6": CPU2,
- "iPad2,7": CPU2,
- "iPad3,1": CPU2,
- "iPad3,2": CPU2,
- "iPad3,3": CPU2,
- "iPod7,1": (1100, 1100),
- "iPhone5,1": CPU3,
- "iPhone5,2": CPU3,
- "iPhone5,3": CPU3,
- "iPhone5,4": CPU3,
- "iPhone6,1": CPU3,
- "iPhone6,2": CPU3,
- "iPad4,4": CPU3,
- "iPad4,5": CPU3,
- "iPad4,6": CPU3,
- "iPad4,7": CPU3,
- "iPad4,8": CPU3,
- "iPad4,9": CPU3,
- "iPhone7,1": CPU4,
- "iPhone7,2": CPU4,
- "iPad3,4": CPU4,
- "iPad3,5": CPU4,
- "iPad3,6": CPU4,
- "iPad4,1": CPU4,
- "iPad4,2": CPU4,
- "iPad4,3": CPU4,
- "iPad5,1": CPU5,
- "iPad5,2": CPU5,
- "AppleTV5,3": CPU5,
- "iPod9,1": (1630, 1630),
- "iPad6,11": CPU6,
- "iPad6,12": CPU6,
- "iPhone8,1": CPU7,
- "iPhone8,2": CPU7,
- "iPhone8,4": CPU7,
- "iPad6,3": CPU8,
- "iPad6,4": CPU8,
- "iPad6,7": CPU9,
- "iPad6,8": CPU9,
- "iPad7,11": CPU10,
- "iPad7,12": CPU10,
- "iPad7,5": CPU11,
- "iPad7,6": CPU11,
- "iPhone9,1": CPU11,
- "iPhone9,2": CPU11,
- "iPhone9,3": CPU11,
- "iPhone9,4": CPU11,
- "iPad5,3": CPU12,
- "iPad5,4": CPU12,
- "AppleTV6,2": (2380, 2380, 2380),
- "iPad7,1": CPU13,
- "iPad7,2": CPU13,
- "iPad7,3": CPU13,
- "iPad7,4": CPU13,
- "iPhone10,1": CPU14,
- "iPhone10,2": CPU14,
- "iPhone10,3": CPU14,
- "iPhone10,4": CPU14,
- "iPhone10,5": CPU14,
- "iPhone10,6": CPU14,
- "iPad11,1": CPU15,
- "iPad11,2": CPU15,
- "iPad11,3": CPU15,
- "iPad11,4": CPU15,
- "iPad11,6": CPU15,
- "iPad11,7": CPU15,
- "iPhone11,2": CPU15,
- "iPhone11,4": CPU15,
- "iPhone11,6": CPU15,
- "iPhone11,8": CPU15,
- "AppleTV11,1": CPU15,
- "iPhone12,1": CPU16,
- "iPhone12,3": CPU16,
- "iPhone12,5": CPU16,
- "iPhone12,8": CPU16,
- "iPad12,1": CPU16,
- "iPad12,2": CPU16,
- "iPad8,1": CPU17,
- "iPad8,2": CPU17,
- "iPad8,3": CPU17,
- "iPad8,4": CPU17,
- "iPad8,5": CPU17,
- "iPad8,6": CPU17,
- "iPad8,7": CPU17,
- "iPad8,8": CPU17,
- "iPad8,9": CPU17,
- "iPad8,10": CPU17,
- "iPad8,11": CPU17,
- "iPad8,12": CPU17,
- "iPhone13,1": CPU18,
- "iPhone13,2": CPU18,
- "iPhone13,3": CPU18,
- "iPhone13,4": CPU18,
- "iPad13,1": CPU18,
- "iPad13,2": CPU18,
- "iPhone14,2": CPU19,
- "iPhone14,3": CPU19,
- "iPhone14,4": CPU19,
- "iPhone14,5": CPU19,
- "iPad14,1": CPU20,
- "iPad14,2": CPU20,
- "iPad13,4": CPU21,
- "iPad13,5": CPU21,
- "iPad13,6": CPU21,
- "iPad13,7": CPU21,
- "iPad13,8": CPU21,
- "iPad13,9": CPU21,
- "iPad13,10": CPU21,
- "iPad13,11": CPU21,
- "iPhone14,6": CPU19,
- "iPhone14,7": CPU22,
- "iPhone14,8": CPU22,
- "iPhone15,2": CPU23,
- "iPhone15,3": CPU23,
-}
-
-
-def ios_cpu_core_max_frequencies_mhz(model: str) -> tuple[int, ...] | None:
- if model in IOS_CPU_FREQUENCIES:
- return IOS_CPU_FREQUENCIES[model]
- # New unreleased device, assume device is best of class */
- if model.startswith("iPhone"):
- return CPU19
- if model.startswith("iPad"):
- return CPU21
- if model.startswith("AppleTV"):
- return CPU15
- if model.startswith("Watch"):
- return CPU6
- return None # unknown device
diff --git a/src/sentry/profiles/task.py b/src/sentry/profiles/task.py
index 71cc3c98db3bf2..60debead1d3095 100644
--- a/src/sentry/profiles/task.py
+++ b/src/sentry/profiles/task.py
@@ -1,9 +1,7 @@
from __future__ import annotations
-import random
from copy import deepcopy
from datetime import datetime, timezone
-from functools import lru_cache
from time import time
from typing import Any, TypedDict
from uuid import UUID
@@ -21,8 +19,6 @@
from sentry.models.eventerror import EventError
from sentry.models.organization import Organization
from sentry.models.project import Project
-from sentry.models.projectkey import ProjectKey, UseCase
-from sentry.profiles.device import classify_device
from sentry.profiles.java import (
convert_android_methods_to_jvm_frames,
deobfuscate_signature,
@@ -34,6 +30,7 @@
apply_stack_trace_rules_to_profile,
get_from_profiling_service,
)
+from sentry.search.utils import DEVICE_CLASS
from sentry.signals import first_profile_received
from sentry.silo.base import SiloMode
from sentry.tasks.base import instrumented_task
@@ -41,20 +38,16 @@
from sentry.utils.outcomes import Outcome, track_outcome
from sentry.utils.sdk import set_measurement
-
-class VroomTimeout(Exception):
- pass
+REVERSE_DEVICE_CLASS = {next(iter(tags)): label for label, tags in DEVICE_CLASS.items()}
@instrumented_task(
name="sentry.profiles.task.process_profile",
- queue="profiles.process",
- autoretry_for=(VroomTimeout,), # Retry when vroom returns a GCS timeout
retry_backoff=True,
- retry_backoff_max=60, # up to 1 min
+ retry_backoff_max=20,
retry_jitter=True,
default_retry_delay=5, # retries after 5s
- max_retries=5,
+ max_retries=2,
acks_late=True,
task_time_limit=60,
task_acks_on_failure_or_timeout=False,
@@ -158,27 +151,6 @@ def process_profile_task(
set_measurement("profile.stacks.processed", len(profile["profile"]["stacks"]))
set_measurement("profile.frames.processed", len(profile["profile"]["frames"]))
- if (
- profile.get("version") in ["1", "2"]
- and options.get("profiling.generic_metrics.functions_ingestion.enabled")
- and (
- organization.id
- in options.get("profiling.generic_metrics.functions_ingestion.allowed_org_ids")
- or random.random()
- < options.get("profiling.generic_metrics.functions_ingestion.rollout_rate")
- )
- and project.id
- not in options.get("profiling.generic_metrics.functions_ingestion.denied_proj_ids")
- ):
- try:
- with metrics.timer("process_profile.get_metrics_dsn"):
- dsn = get_metrics_dsn(project.id)
- profile["options"] = {
- "dsn": dsn,
- }
- except Exception as e:
- sentry_sdk.capture_exception(e)
-
if options.get("profiling.stack_trace_rules.enabled"):
try:
with metrics.timer("process_profile.apply_stack_trace_rules"):
@@ -373,34 +345,12 @@ def _normalize(profile: Profile, organization: Organization) -> None:
if platform not in {"cocoa", "android"} or version == "2":
return
- classification_options = dict()
-
- if platform == "android":
- classification_options.update(
- {
- "cpu_frequencies": profile["device_cpu_frequencies"],
- "physical_memory_bytes": profile["device_physical_memory_bytes"],
- }
- )
+ classification = profile.get("transaction_tags", {}).get("device.class", None)
- if version == "1":
- classification_options.update(
- {
- "model": profile["device"]["model"],
- "os_name": profile["os"]["name"],
- "is_emulator": profile["device"]["is_emulator"],
- }
- )
- elif version is None:
- classification_options.update(
- {
- "model": profile["device_model"],
- "os_name": profile["device_os_name"],
- "is_emulator": profile["device_is_emulator"],
- }
- )
+ if not classification:
+ return
- classification = str(classify_device(**classification_options))
+ classification = REVERSE_DEVICE_CLASS.get(classification, "unknown")
if version == "1":
profile["device"]["classification"] = classification
@@ -515,7 +465,10 @@ def symbolicate(
classes=[],
)
return symbolicator.process_payload(
- platform=platform, stacktraces=stacktraces, modules=modules, apply_source_context=False
+ platform=platform,
+ stacktraces=stacktraces,
+ modules=modules,
+ apply_source_context=False,
)
@@ -956,29 +909,27 @@ def _insert_vroom_profile(profile: Profile) -> bool:
path = "/chunk" if "profiler_id" in profile else "/profile"
response = get_from_profiling_service(method="POST", path=path, json_data=profile)
+ sentry_sdk.set_tag("vroom.response.status_code", str(response.status))
+
+ reason = "bad status"
+
if response.status == 204:
return True
elif response.status == 429:
- raise VroomTimeout
+ reason = "gcs timeout"
elif response.status == 412:
- metrics.incr(
- "process_profile.insert_vroom_profile.error",
- tags={
- "platform": profile["platform"],
- "reason": "duplicate profile",
- },
- sample_rate=1.0,
- )
- return False
- else:
- metrics.incr(
- "process_profile.insert_vroom_profile.error",
- tags={"platform": profile["platform"], "reason": "bad status"},
- sample_rate=1.0,
- )
- return False
- except VroomTimeout:
- raise
+ reason = "duplicate profile"
+
+ metrics.incr(
+ "process_profile.insert_vroom_profile.error",
+ tags={
+ "platform": profile["platform"],
+ "reason": reason,
+ "status_code": response.status,
+ },
+ sample_rate=1.0,
+ )
+ return False
except Exception as e:
sentry_sdk.capture_exception(e)
metrics.incr(
@@ -1022,22 +973,6 @@ class _ProjectKeyKwargs(TypedDict):
use_case: str
-@lru_cache(maxsize=100)
-def get_metrics_dsn(project_id: int) -> str:
- kwargs: _ProjectKeyKwargs = {
- "project_id": project_id,
- "use_case": UseCase.PROFILING.value,
- }
- try:
- project_key, _ = ProjectKey.objects.get_or_create(**kwargs)
- except ProjectKey.MultipleObjectsReturned:
- # See https://docs.djangoproject.com/en/5.0/ref/models/querysets/#get-or-create
- project_key_first = ProjectKey.objects.filter(**kwargs).order_by("pk").first()
- assert project_key_first is not None
- project_key = project_key_first
- return project_key.get_dsn(public=True)
-
-
@metrics.wraps("process_profile.track_outcome")
def _track_duration_outcome(
profile: Profile,
diff --git a/src/sentry/queue/routers.py b/src/sentry/queue/routers.py
index b1ad6081ec9f5a..00b091b8586dc5 100644
--- a/src/sentry/queue/routers.py
+++ b/src/sentry/queue/routers.py
@@ -127,14 +127,8 @@ def route_for_queue(self, queue: str) -> str:
if random.random() >= rollout_rate:
return queue
- if queue in set(options.get("celery_split_queue_legacy_mode")):
- # Use legacy route
- # This router required to define the routing logic inside the
- # settings file.
- return settings.SENTRY_POST_PROCESS_QUEUE_SPLIT_ROUTER.get(queue, lambda: queue)()
+ router = self.__queue_routers.get(queue)
+ if router is not None:
+ return next(router)
else:
- router = self.__queue_routers.get(queue)
- if router is not None:
- return next(router)
- else:
- return queue
+ return queue
diff --git a/src/sentry/quotas/base.py b/src/sentry/quotas/base.py
index 7e1f7a4d15ea9a..e236126a719c36 100644
--- a/src/sentry/quotas/base.py
+++ b/src/sentry/quotas/base.py
@@ -457,6 +457,12 @@ def get_abuse_quotas(self, org):
categories=[DataCategory.ATTACHMENT],
scope=QuotaScope.PROJECT,
),
+ AbuseQuota(
+ id="paai",
+ option="project-abuse-quota.attachment-item-limit",
+ categories=[DataCategory.ATTACHMENT_ITEM],
+ scope=QuotaScope.PROJECT,
+ ),
AbuseQuota(
id="pas",
option="project-abuse-quota.session-limit",
diff --git a/src/sentry/ratelimits/base.py b/src/sentry/ratelimits/base.py
index fb5fe63893a64b..b43b296d7a3625 100644
--- a/src/sentry/ratelimits/base.py
+++ b/src/sentry/ratelimits/base.py
@@ -31,3 +31,6 @@ def is_limited_with_value(
def validate(self) -> None:
raise NotImplementedError
+
+ def reset(self, key: str, project: Project | None = None, window: int | None = None) -> None:
+ return
diff --git a/src/sentry/ratelimits/redis.py b/src/sentry/ratelimits/redis.py
index d96c02c8fe0efd..2d43855ae93be3 100644
--- a/src/sentry/ratelimits/redis.py
+++ b/src/sentry/ratelimits/redis.py
@@ -120,3 +120,7 @@ def is_limited_with_value(
return False, 0, reset_time
return result > limit, result, reset_time
+
+ def reset(self, key: str, project: Project | None = None, window: int | None = None) -> None:
+ redis_key = self._construct_redis_key(key, project=project, window=window)
+ self.client.delete(redis_key)
diff --git a/src/sentry/receivers/onboarding.py b/src/sentry/receivers/onboarding.py
index 97d8976ebcba98..c363a085be319a 100644
--- a/src/sentry/receivers/onboarding.py
+++ b/src/sentry/receivers/onboarding.py
@@ -69,9 +69,8 @@ def record_new_project(project, user=None, user_id=None, **kwargs):
else:
user_id = None
try:
- default_user_id = (
- Organization.objects.get(id=project.organization_id).get_default_owner().id
- )
+ default_user = Organization.objects.get(id=project.organization_id).get_default_owner()
+ default_user_id = default_user.id
except IndexError:
logger.warning(
"Cannot initiate onboarding for organization (%s) due to missing owners",
@@ -113,6 +112,13 @@ def record_new_project(project, user=None, user_id=None, **kwargs):
),
project_id=project.id,
)
+ analytics.record(
+ "second_platform.added",
+ user_id=default_user_id,
+ organization_id=project.organization_id,
+ project_id=project.id,
+ )
+ try_mark_onboarding_complete(project.organization_id, user)
@first_event_received.connect(weak=False)
@@ -641,8 +647,6 @@ def record_plugin_enabled(plugin, project, user, **kwargs):
@alert_rule_created.connect(weak=False)
def record_alert_rule_created(user, project: Project, rule_type: str, **kwargs):
- # NOTE: This intentionally does not fire for the default issue alert rule
- # that gets created on new project creation.
task = OnboardingTask.METRIC_ALERT if rule_type == "metric" else OnboardingTask.ALERT_RULE
rows_affected, created = OrganizationOnboardingTask.objects.create_or_update(
organization_id=project.organization_id,
@@ -735,6 +739,7 @@ def record_integration_added(
task=task_mapping[integration_type],
status=OnboardingTaskStatus.COMPLETE,
)
+ try_mark_onboarding_complete(organization_id, user)
else:
task = OrganizationOnboardingTask.objects.filter(
organization_id=organization_id,
diff --git a/src/sentry/receivers/rules.py b/src/sentry/receivers/rules.py
index e024ff4fab2410..c07b27c62fe0e1 100644
--- a/src/sentry/receivers/rules.py
+++ b/src/sentry/receivers/rules.py
@@ -1,7 +1,13 @@
+import logging
+
+from sentry import features
from sentry.models.project import Project
from sentry.models.rule import Rule
from sentry.notifications.types import FallthroughChoiceType
-from sentry.signals import project_created
+from sentry.signals import alert_rule_created, project_created
+from sentry.users.services.user.model import RpcUser
+
+logger = logging.getLogger("sentry")
DEFAULT_RULE_LABEL = "Send a notification for high priority issues"
DEFAULT_RULE_ACTIONS = [
@@ -31,7 +37,29 @@ def create_default_rules(project: Project, default_rules=True, RuleModel=Rule, *
return
rule_data = DEFAULT_RULE_DATA
- RuleModel.objects.create(project=project, label=DEFAULT_RULE_LABEL, data=rule_data)
+ rule = RuleModel.objects.create(project=project, label=DEFAULT_RULE_LABEL, data=rule_data)
+
+ try:
+ user: RpcUser = project.organization.get_default_owner()
+ except IndexError:
+ logger.warning(
+ "Cannot record default rule created for organization (%s) due to missing owners",
+ project.organization_id,
+ )
+ return
+
+ if features.has("organizations:quick-start-updates", project.organization, actor=user):
+ # When a user creates a new project and opts to set up an issue alert within it,
+ # the corresponding task in the quick start sidebar is automatically marked as complete.
+ alert_rule_created.send(
+ user=user,
+ project=project,
+ rule_id=rule.id,
+ # The default rule created within a new project is always of type 'issue'
+ rule_type="issue",
+ sender=type(project),
+ is_api_token=False,
+ )
project_created.connect(create_default_rules, dispatch_uid="create_default_rules", weak=False)
diff --git a/src/sentry/relay/globalconfig.py b/src/sentry/relay/globalconfig.py
index 6e58750ec3bdfa..b992fb8fb6802b 100644
--- a/src/sentry/relay/globalconfig.py
+++ b/src/sentry/relay/globalconfig.py
@@ -15,7 +15,6 @@
"profiling.profile_metrics.unsampled_profiles.platforms",
"profiling.profile_metrics.unsampled_profiles.sample_rate",
"profiling.profile_metrics.unsampled_profiles.enabled",
- "profiling.generic_metrics.functions_ingestion.enabled",
"relay.span-usage-metric",
"relay.cardinality-limiter.mode",
"relay.cardinality-limiter.error-sample-rate",
@@ -23,7 +22,6 @@
"relay.metric-bucket-distribution-encodings",
"relay.metric-stats.rollout-rate",
"relay.span-extraction.sample-rate",
- "relay.compute-metrics-summaries.sample-rate",
"relay.span-normalization.allowed_hosts",
]
diff --git a/src/sentry/release_health/base.py b/src/sentry/release_health/base.py
index d9fd7530fb122b..9e747ce4ea608e 100644
--- a/src/sentry/release_health/base.py
+++ b/src/sentry/release_health/base.py
@@ -243,14 +243,10 @@ class ReleaseHealthBackend(Service):
"get_project_releases_count",
"get_project_release_stats",
"get_project_sessions_count",
- "is_metrics_based",
"get_num_sessions_per_project",
"get_project_releases_by_stability",
)
- def is_metrics_based(self) -> bool:
- return False
-
def get_current_and_previous_crash_free_rates(
self,
project_ids: Sequence[ProjectId],
diff --git a/src/sentry/release_health/metrics.py b/src/sentry/release_health/metrics.py
index 9be08a6101b290..2b0229516e4963 100644
--- a/src/sentry/release_health/metrics.py
+++ b/src/sentry/release_health/metrics.py
@@ -173,9 +173,6 @@ def _get_crash_free_rate_data(
result_groups=result_groups
)
- def is_metrics_based(self) -> bool:
- return True
-
def get_current_and_previous_crash_free_rates(
self,
project_ids: Sequence[int],
diff --git a/src/sentry/remote_subscriptions/consumers/result_consumer.py b/src/sentry/remote_subscriptions/consumers/result_consumer.py
index 6e7ddcdf99b6c7..51958ee6f4a969 100644
--- a/src/sentry/remote_subscriptions/consumers/result_consumer.py
+++ b/src/sentry/remote_subscriptions/consumers/result_consumer.py
@@ -2,17 +2,23 @@
import abc
import logging
+from collections import defaultdict
from collections.abc import Mapping
-from typing import Generic, TypeVar
+from concurrent.futures import ThreadPoolExecutor, wait
+from typing import Generic, Literal, TypeVar
+import sentry_sdk
from arroyo.backends.kafka.consumer import KafkaPayload
+from arroyo.processing.strategies import BatchStep
from arroyo.processing.strategies.abstract import ProcessingStrategy, ProcessingStrategyFactory
+from arroyo.processing.strategies.batching import ValuesBatch
from arroyo.processing.strategies.commit import CommitOffsets
from arroyo.processing.strategies.run_task import RunTask
-from arroyo.types import Commit, FilteredPayload, Message, Partition
+from arroyo.types import BrokerValue, Commit, FilteredPayload, Message, Partition
from sentry.conf.types.kafka_definition import Topic, get_topic_codec
from sentry.remote_subscriptions.models import BaseRemoteSubscription
+from sentry.utils import metrics
logger = logging.getLogger(__name__)
@@ -54,7 +60,40 @@ def handle_result(self, subscription: U | None, result: T):
class ResultsStrategyFactory(ProcessingStrategyFactory[KafkaPayload], Generic[T, U]):
- def __init__(self) -> None:
+ parallel_executor: ThreadPoolExecutor | None = None
+
+ parallel = False
+ """
+ Does the consumer process unrelated messages in parallel?
+ """
+
+ max_batch_size = 500
+ """
+ How many messages will be batched at once when in parallel mode.
+ """
+
+ max_batch_time = 10
+ """
+    The maximum time in seconds to accumulate a batch of check-ins.
+ """
+
+ def __init__(
+ self,
+ mode: Literal["parallel", "serial"] = "serial",
+ max_batch_size: int | None = None,
+ max_batch_time: int | None = None,
+ max_workers: int | None = None,
+ ) -> None:
+ self.mode = mode
+ if mode == "parallel":
+ self.parallel = True
+ self.parallel_executor = ThreadPoolExecutor(max_workers=max_workers)
+
+ if max_batch_size is not None:
+ self.max_batch_size = max_batch_size
+ if max_batch_time is not None:
+ self.max_batch_time = max_batch_time
+
self.result_processor = self.result_processor_cls()
self.codec = get_topic_codec(self.topic_for_codec)
@@ -68,6 +107,26 @@ def topic_for_codec(self) -> Topic:
def result_processor_cls(self) -> type[ResultProcessor[T, U]]:
pass
+ @abc.abstractmethod
+ def build_payload_grouping_key(self, result: T) -> str:
+ """
+ Used in parallel processing mode. This method should return a string used to
+ group related results together for serial processing.
+ """
+ pass
+
+ @property
+ @abc.abstractmethod
+ def identifier(self) -> str:
+ """
+ A unique identifier for this consumer - used to differentiate it in stats
+ """
+ pass
+
+ def shutdown(self) -> None:
+ if self.parallel_executor:
+ self.parallel_executor.shutdown()
+
def decode_payload(self, payload: KafkaPayload | FilteredPayload) -> T | None:
assert not isinstance(payload, FilteredPayload)
try:
@@ -79,20 +138,95 @@ def decode_payload(self, payload: KafkaPayload | FilteredPayload) -> T | None:
)
return None
+ def create_with_partitions(
+ self,
+ commit: Commit,
+ partitions: Mapping[Partition, int],
+ ) -> ProcessingStrategy[KafkaPayload]:
+ if self.parallel:
+ return self.create_thread_parallel_worker(commit)
+ else:
+ return self.create_serial_worker(commit)
+
+ def create_serial_worker(self, commit: Commit) -> ProcessingStrategy[KafkaPayload]:
+ return RunTask(
+ function=self.process_single,
+ next_step=CommitOffsets(commit),
+ )
+
def process_single(self, message: Message[KafkaPayload | FilteredPayload]):
result = self.decode_payload(message.payload)
if result is not None:
self.result_processor(result)
- def create_serial_worker(self, commit: Commit) -> ProcessingStrategy[KafkaPayload]:
- return RunTask(
- function=self.process_single,
+ def create_thread_parallel_worker(self, commit: Commit) -> ProcessingStrategy[KafkaPayload]:
+ assert self.parallel_executor is not None
+ batch_processor = RunTask(
+ function=self.process_batch,
next_step=CommitOffsets(commit),
)
+ return BatchStep(
+ max_batch_size=self.max_batch_size,
+ max_batch_time=self.max_batch_time,
+ next_step=batch_processor,
+ )
- def create_with_partitions(
- self,
- commit: Commit,
- partitions: Mapping[Partition, int],
- ) -> ProcessingStrategy[KafkaPayload]:
- return self.create_serial_worker(commit)
+ def partition_message_batch(self, message: Message[ValuesBatch[KafkaPayload]]) -> list[list[T]]:
+ """
+ Takes a batch of messages and partitions them based on the `build_payload_grouping_key` method.
+ Returns a generator that yields each partitioned list of messages.
+ """
+ batch = message.payload
+
+ batch_mapping: Mapping[str, list[T]] = defaultdict(list)
+ for item in batch:
+ assert isinstance(item, BrokerValue)
+
+ result = self.decode_payload(item.payload)
+ if result is None:
+ continue
+
+ key = self.build_payload_grouping_key(result)
+ batch_mapping[key].append(result)
+
+ # Number of messages that are being processed in this batch
+ metrics.gauge(
+ "remote_subscriptions.result_consumer.parallel_batch_count",
+ len(batch),
+ tags={"identifier": self.identifier, "mode": self.mode},
+ )
+ # Number of groups we've collected to be processed in parallel
+ metrics.gauge(
+ "remote_subscriptions.result_consumer.parallel_batch_groups",
+ len(batch_mapping),
+ tags={"identifier": self.identifier, "mode": self.mode},
+ )
+
+ return list(batch_mapping.values())
+
+ def process_batch(self, message: Message[ValuesBatch[KafkaPayload]]):
+ """
+ Receives batches of messages. This function will take the batch and group them together
+ using `build_payload_grouping_key`, which ensures order is preserved. Each group is then
+        executed using a ThreadPoolExecutor.
+
+ By batching we're able to process messages in parallel while guaranteeing that no messages
+ are processed out of order.
+ """
+ assert self.parallel_executor is not None
+ partitioned_values = self.partition_message_batch(message)
+
+ # Submit groups for processing
+ with sentry_sdk.start_transaction(op="process_batch", name="monitors.monitor_consumer"):
+ futures = [
+ self.parallel_executor.submit(self.process_group, group)
+ for group in partitioned_values
+ ]
+ wait(futures)
+
+ def process_group(self, items: list[T]):
+ """
+ Process a group of related messages serially.
+ """
+ for item in items:
+ self.result_processor(item)
diff --git a/src/sentry/replays/lib/new_query/parsers.py b/src/sentry/replays/lib/new_query/parsers.py
index 4229fa491f1b79..092f37862b6479 100644
--- a/src/sentry/replays/lib/new_query/parsers.py
+++ b/src/sentry/replays/lib/new_query/parsers.py
@@ -19,7 +19,10 @@ def parse_float(value: str) -> float:
def parse_int(value: str) -> int:
"""Coerce to int or fail."""
- return int(parse_float(value))
+ try:
+ return int(parse_float(value))
+ except (ValueError, CouldNotParseValue):
+ raise CouldNotParseValue("Failed to parse int.")
def parse_duration(value: str) -> int:
@@ -30,7 +33,6 @@ def parse_duration(value: str) -> int:
milliseconds = parse_int(value)
if milliseconds % 1000:
# TODO: remove once we support milliseconds.
- # TODO: this error isn't actually returned to the frontend, it's caught and then we raise a ParseError
raise CouldNotParseValue(
f"Replays only supports second-resolution timestamps at this time. Try '{milliseconds // 1000}s' instead."
)
diff --git a/src/sentry/replays/usecases/query/__init__.py b/src/sentry/replays/usecases/query/__init__.py
index 986c594d59a742..96ccd3ec4d0958 100644
--- a/src/sentry/replays/usecases/query/__init__.py
+++ b/src/sentry/replays/usecases/query/__init__.py
@@ -103,14 +103,19 @@ def handle_search_filters(
# are top level filters they are implicitly AND'ed in the WHERE/HAVING clause. Otherwise
# explicit operators are used.
if isinstance(search_filter, SearchFilter):
+
try:
condition = search_filter_to_condition(search_config, search_filter)
if condition is None:
raise ParseError(f"Unsupported search field: {search_filter.key.name}")
except OperatorNotSupported:
raise ParseError(f"Invalid operator specified for `{search_filter.key.name}`")
- except CouldNotParseValue:
- raise ParseError(f"Could not parse value for `{search_filter.key.name}`")
+ except CouldNotParseValue as e:
+ err_msg = f"Could not parse value for `{search_filter.key.name}`."
+ if e.args and e.args[0]:
+ # avoid using str(e) as it may expose stack trace info
+ err_msg += f" Detail: {e.args[0]}"
+ raise ParseError(err_msg)
if look_back == "AND":
look_back = None
diff --git a/src/sentry/rules/match.py b/src/sentry/rules/match.py
index a21a249228cf44..9fca4e945c50ca 100644
--- a/src/sentry/rules/match.py
+++ b/src/sentry/rules/match.py
@@ -1,8 +1,9 @@
from collections.abc import Iterable
+from enum import StrEnum
from typing import Any
-class MatchType:
+class MatchType(StrEnum):
CONTAINS = "co"
ENDS_WITH = "ew"
EQUAL = "eq"
diff --git a/src/sentry/runner/commands/devserver.py b/src/sentry/runner/commands/devserver.py
index 5a2b9a947a8f4a..219a9f7dc8be8d 100644
--- a/src/sentry/runner/commands/devserver.py
+++ b/src/sentry/runner/commands/devserver.py
@@ -30,6 +30,7 @@
"worker": ["sentry", "run", "worker", "-c", "1", "--autoreload"],
"celery-beat": ["sentry", "run", "cron", "--autoreload"],
"server": ["sentry", "run", "web"],
+ "taskworker": ["sentry", "run", "taskworker"],
}
_SUBSCRIPTION_RESULTS_CONSUMERS = [
@@ -138,6 +139,11 @@ def _get_daemon(name: str) -> tuple[str, list[str]]:
type=click.Choice(["control", "region"]),
help="The silo mode to run this devserver instance in. Choices are control, region, none",
)
+@click.option(
+ "--taskworker/--no-taskworker",
+ default=False,
+ help="Run kafka-based task workers",
+)
@click.argument(
"bind",
default=None,
@@ -164,6 +170,7 @@ def devserver(
client_hostname: str,
ngrok: str | None,
silo: str | None,
+ taskworker: bool,
) -> NoReturn:
"Starts a lightweight web server for development."
if bind is None:
@@ -286,6 +293,9 @@ def devserver(
click.echo("--ingest was provided, implicitly enabling --workers")
workers = True
+ if taskworker:
+ daemons.append(_get_daemon("taskworker"))
+
if workers and not celery_beat:
click.secho(
"If you want to run periodic tasks from celery (celerybeat), you need to also pass --celery-beat.",
@@ -354,10 +364,19 @@ def devserver(
# Create all topics if the Kafka eventstream is selected
if kafka_consumers:
- kafka_container_name = (
- "kafka-kafka-1" if os.environ.get("USE_NEW_DEVSERVICES") == "1" else "sentry_kafka"
+ use_new_devservices = os.environ.get("USE_NEW_DEVSERVICES") == "1"
+ valid_kafka_container_names = ["kafka-kafka-1", "sentry_kafka"]
+ kafka_container_name = "kafka-kafka-1" if use_new_devservices else "sentry_kafka"
+ kafka_container_warning_message = (
+ f"""
+Devserver is configured to work with the revamped devservices. Looks like the `{kafka_container_name}` container is not running.
+Please run `devservices up` to start it. If you would like to use devserver with `sentry devservices`, set `USE_NEW_DEVSERVICES=0` in your environment."""
+ if use_new_devservices
+ else f"""
+Devserver is configured to work with `sentry devservices`. Looks like the `{kafka_container_name}` container is not running.
+Please run `sentry devservices up kafka` to start it. If you would like to use devserver with the revamped devservices, set `USE_NEW_DEVSERVICES=1` in your environment."""
)
- if kafka_container_name not in containers:
+ if not any(name in containers for name in valid_kafka_container_names):
raise click.ClickException(
f"""
Devserver is configured to start some kafka consumers, but Kafka
@@ -373,7 +392,7 @@ def devserver(
SENTRY_EVENTSTREAM = "sentry.eventstream.kafka.KafkaEventStream"
-and run `sentry devservices up kafka`.
+{kafka_container_warning_message}
Alternatively, run without --workers.
"""
diff --git a/src/sentry/runner/commands/devservices.py b/src/sentry/runner/commands/devservices.py
index 5b0b91f5a7782c..d4d2d7dfca3f62 100644
--- a/src/sentry/runner/commands/devservices.py
+++ b/src/sentry/runner/commands/devservices.py
@@ -303,6 +303,21 @@ def up(
"""
from sentry.runner import configure
+ if os.environ.get("USE_NEW_DEVSERVICES", "0") != "1":
+ click.secho(
+ """
+WARNING: We're transitioning from `sentry devservices` to the new and improved `devservices` in January 2025.
+To give the new devservices a try, set the `USE_NEW_DEVSERVICES` environment variable to `1`. For a full list of commands, see
+https://github.com/getsentry/devservices?tab=readme-ov-file#commands
+
+Instead of running `sentry devservices up`, consider using `devservices up`.
+For Sentry employees - if you hit any bumps or have feedback, we'd love to hear from you in #discuss-dev-infra.
+Thanks for helping the Dev Infra team improve this experience!
+
+ """,
+ fg="yellow",
+ )
+
configure()
containers = _prepare_containers(
@@ -520,6 +535,21 @@ def down(project: str, service: list[str]) -> None:
an explicit list of services to bring down.
"""
+ if os.environ.get("USE_NEW_DEVSERVICES", "0") != "1":
+ click.secho(
+ """
+WARNING: We're transitioning from `sentry devservices` to the new and improved `devservices` in January 2025.
+To give the new devservices a try, set the `USE_NEW_DEVSERVICES` environment variable to `1`. For a full list of commands, see
+https://github.com/getsentry/devservices?tab=readme-ov-file#commands
+
+Instead of running `sentry devservices down`, consider using `devservices down`.
+For Sentry employees - if you hit any bumps or have feedback, we'd love to hear from you in #discuss-dev-infra.
+Thanks for helping the Dev Infra team improve this experience!
+
+ """,
+ fg="yellow",
+ )
+
def _down(container: docker.models.containers.Container) -> None:
click.secho(f"> Stopping '{container.name}' container", fg="red")
container.stop()
diff --git a/src/sentry/runner/commands/run.py b/src/sentry/runner/commands/run.py
index 73814b5e4e77d6..75679d41fc50d2 100644
--- a/src/sentry/runner/commands/run.py
+++ b/src/sentry/runner/commands/run.py
@@ -7,6 +7,7 @@
from typing import Any
import click
+from django.utils import autoreload
from sentry.bgtasks.api import managed_bgtasks
from sentry.runner.decorators import configuration, log_options
@@ -230,8 +231,6 @@ def worker(ignore_unknown_queues: bool, **options: Any) -> None:
raise click.ClickException(message)
if options["autoreload"]:
- from django.utils import autoreload
-
autoreload.run_with_reloader(run_worker, **options)
else:
run_worker(**options)
@@ -243,13 +242,33 @@ def worker(ignore_unknown_queues: bool, **options: Any) -> None:
@click.option(
"--max-task-count", help="Number of tasks this worker should run before exiting", default=10000
)
+@click.option(
+ "--namespace", help="The dedicated task namespace that taskworker operates on", default=None
+)
@log_options()
@configuration
-def taskworker(rpc_host: str, max_task_count: int, **options: Any) -> None:
+def taskworker(**options: Any) -> None:
+ """
+ Run a taskworker worker
+ """
+ if options["autoreload"]:
+ autoreload.run_with_reloader(run_taskworker, **options)
+ else:
+ run_taskworker(**options)
+
+
+def run_taskworker(
+ rpc_host: str, max_task_count: int, namespace: str | None, **options: Any
+) -> None:
+ """
+ taskworker factory that can be reloaded
+ """
from sentry.taskworker.worker import TaskWorker
with managed_bgtasks(role="taskworker"):
- worker = TaskWorker(rpc_host=rpc_host, max_task_count=max_task_count, **options)
+ worker = TaskWorker(
+ rpc_host=rpc_host, max_task_count=max_task_count, namespace=namespace, **options
+ )
exitcode = worker.start()
raise SystemExit(exitcode)
@@ -397,6 +416,11 @@ def cron(**options: Any) -> None:
is_flag=True,
default=True,
)
+@click.option(
+ "--stale-threshold-sec",
+ type=click.IntRange(min=300),
+ help="Routes stale messages to stale topic if provided. This feature is currently being tested, do not pass in production yet.",
+)
@click.option(
"--log-level",
type=click.Choice(["debug", "info", "warning", "error", "critical"], case_sensitive=False),
@@ -481,6 +505,7 @@ def dev_consumer(consumer_names: tuple[str, ...]) -> None:
synchronize_commit_group=None,
synchronize_commit_log_topic=None,
enable_dlq=False,
+ stale_threshold_sec=None,
healthcheck_file_path=None,
enforce_schema=True,
)
diff --git a/src/sentry/search/eap/columns.py b/src/sentry/search/eap/columns.py
index 58245dc36b872e..13b3f7467a8095 100644
--- a/src/sentry/search/eap/columns.py
+++ b/src/sentry/search/eap/columns.py
@@ -14,6 +14,7 @@
from sentry.exceptions import InvalidSearchQuery
from sentry.search.eap import constants
+from sentry.search.events.constants import SPAN_MODULE_CATEGORY_VALUES
from sentry.search.events.types import SnubaParams
from sentry.search.utils import DEVICE_CLASS
from sentry.utils.validators import is_event_id, is_span_id
@@ -246,6 +247,11 @@ def datetime_processor(datetime_string: str) -> str:
internal_name="sentry.status",
search_type="string",
),
+ ResolvedColumn(
+ public_alias="span.status_code",
+ internal_name="sentry.status_code",
+ search_type="string",
+ ),
ResolvedColumn(
public_alias="trace",
internal_name="sentry.trace_id",
@@ -321,7 +327,6 @@ def datetime_processor(datetime_string: str) -> str:
simple_sentry_field("release"),
simple_sentry_field("sdk.name"),
simple_sentry_field("sdk.version"),
- simple_sentry_field("span.status_code"),
simple_sentry_field("span_id"),
simple_sentry_field("trace.status"),
simple_sentry_field("transaction.method"),
@@ -430,11 +435,21 @@ def device_class_context_constructor(params: SnubaParams) -> VirtualColumnContex
)
+def module_context_constructor(params: SnubaParams) -> VirtualColumnContext:
+ value_map = {key: key for key in SPAN_MODULE_CATEGORY_VALUES}
+ return VirtualColumnContext(
+ from_column_name="sentry.category",
+ to_column_name="span.module",
+ value_map=value_map,
+ )
+
+
VIRTUAL_CONTEXTS = {
"project": project_context_constructor("project"),
"project.slug": project_context_constructor("project.slug"),
"project.name": project_context_constructor("project.name"),
"device.class": device_class_context_constructor,
+ "span.module": module_context_constructor,
}
diff --git a/src/sentry/search/eap/constants.py b/src/sentry/search/eap/constants.py
index de195272b869ff..a802707fec2401 100644
--- a/src/sentry/search/eap/constants.py
+++ b/src/sentry/search/eap/constants.py
@@ -44,8 +44,8 @@
}
# https://github.com/getsentry/snuba/blob/master/snuba/web/rpc/v1/endpoint_time_series.py
-# The RPC limits us to 1000 points per timeseries
-MAX_ROLLUP_POINTS = 1000
+# The RPC limits us to 2016 points per timeseries
+MAX_ROLLUP_POINTS = 2016
# Copied from snuba, a number of total seconds
VALID_GRANULARITIES = frozenset(
{
@@ -55,6 +55,7 @@
2 * 60,
5 * 60,
10 * 60,
+ 15 * 60,
30 * 60, # minutes
1 * 3600,
3 * 3600,
diff --git a/src/sentry/search/eap/spans.py b/src/sentry/search/eap/spans.py
index e1b477bfc528fb..601cf4b465b843 100644
--- a/src/sentry/search/eap/spans.py
+++ b/src/sentry/search/eap/spans.py
@@ -4,6 +4,7 @@
from re import Match
from typing import cast
+import sentry_sdk
from parsimonious.exceptions import ParseError
from sentry_protos.snuba.v1.request_common_pb2 import RequestMeta
from sentry_protos.snuba.v1.trace_item_attribute_pb2 import (
@@ -54,9 +55,13 @@ class SearchResolver:
field(default_factory=dict)
)
+ @sentry_sdk.trace
def resolve_meta(self, referrer: str) -> RequestMeta:
if self.params.organization_id is None:
raise Exception("An organization is required to resolve queries")
+ span = sentry_sdk.get_current_span()
+ if span:
+ span.set_tag("SearchResolver.params", self.params)
return RequestMeta(
organization_id=self.params.organization_id,
referrer=referrer,
@@ -65,10 +70,18 @@ def resolve_meta(self, referrer: str) -> RequestMeta:
end_timestamp=self.params.rpc_end_date,
)
- def resolve_query(self, querystring: str | None) -> TraceItemFilter | None:
+ @sentry_sdk.trace
+ def resolve_query(
+ self, querystring: str | None
+ ) -> tuple[TraceItemFilter | None, list[VirtualColumnContext | None]]:
"""Given a query string in the public search syntax eg. `span.description:foo` construct the TraceItemFilter"""
environment_query = self.__resolve_environment_query()
- query = self.__resolve_query(querystring)
+ query, contexts = self.__resolve_query(querystring)
+ span = sentry_sdk.get_current_span()
+ if span:
+ span.set_tag("SearchResolver.query_string", querystring)
+ span.set_tag("SearchResolver.resolved_query", query)
+ span.set_tag("SearchResolver.environment_query", environment_query)
# The RPC request meta does not contain the environment.
# So we have to inject it as a query condition.
@@ -78,18 +91,21 @@ def resolve_query(self, querystring: str | None) -> TraceItemFilter | None:
# But if both are defined, we AND them together.
if not environment_query:
- return query
+ return query, contexts
if not query:
- return environment_query
-
- return TraceItemFilter(
- and_filter=AndFilter(
- filters=[
- environment_query,
- query,
- ]
- )
+ return environment_query, []
+
+ return (
+ TraceItemFilter(
+ and_filter=AndFilter(
+ filters=[
+ environment_query,
+ query,
+ ]
+ )
+ ),
+ contexts,
)
def __resolve_environment_query(self) -> TraceItemFilter | None:
@@ -115,9 +131,11 @@ def __resolve_environment_query(self) -> TraceItemFilter | None:
return TraceItemFilter(and_filter=AndFilter(filters=filters))
- def __resolve_query(self, querystring: str | None) -> TraceItemFilter | None:
+ def __resolve_query(
+ self, querystring: str | None
+ ) -> tuple[TraceItemFilter | None, list[VirtualColumnContext | None]]:
if querystring is None:
- return None
+ return None, []
try:
parsed_terms = event_search.parse_search_query(
querystring,
@@ -142,8 +160,10 @@ def __resolve_query(self, querystring: str | None) -> TraceItemFilter | None:
def _resolve_boolean_conditions(
self, terms: event_filter.ParsedTerms
- ) -> TraceItemFilter | None:
- if len(terms) == 1:
+ ) -> tuple[TraceItemFilter | None, list[VirtualColumnContext | None]]:
+ if len(terms) == 0:
+ return None, []
+ elif len(terms) == 1:
if isinstance(terms[0], event_search.ParenExpression):
return self._resolve_boolean_conditions(terms[0].children)
elif isinstance(terms[0], event_search.SearchFilter):
@@ -196,38 +216,54 @@ def _resolve_boolean_conditions(
lhs, rhs = terms[:1], terms[1:]
operator = AndFilter
- resolved_lhs = self._resolve_boolean_conditions(lhs) if lhs else None
- resolved_rhs = self._resolve_boolean_conditions(rhs) if rhs else None
+ resolved_lhs, contexts_lhs = self._resolve_boolean_conditions(lhs)
+ resolved_rhs, contexts_rhs = self._resolve_boolean_conditions(rhs)
+ contexts = contexts_lhs + contexts_rhs
if resolved_lhs is not None and resolved_rhs is not None:
if operator == AndFilter:
- return TraceItemFilter(and_filter=AndFilter(filters=[resolved_lhs, resolved_rhs]))
+ return (
+ TraceItemFilter(and_filter=AndFilter(filters=[resolved_lhs, resolved_rhs])),
+ contexts,
+ )
else:
- return TraceItemFilter(or_filter=OrFilter(filters=[resolved_lhs, resolved_rhs]))
+ return (
+ TraceItemFilter(or_filter=OrFilter(filters=[resolved_lhs, resolved_rhs])),
+ contexts,
+ )
elif resolved_lhs is None and resolved_rhs is not None:
- return resolved_rhs
+ return resolved_rhs, contexts
elif resolved_lhs is not None and resolved_rhs is None:
- return resolved_lhs
+ return resolved_lhs, contexts
else:
- return None
+ return None, contexts
- def _resolve_terms(self, terms: event_filter.ParsedTerms) -> TraceItemFilter | None:
+ def _resolve_terms(
+ self, terms: event_filter.ParsedTerms
+ ) -> tuple[TraceItemFilter | None, list[VirtualColumnContext | None]]:
parsed_terms = []
+ resolved_contexts = []
for item in terms:
if isinstance(item, event_search.SearchFilter):
- parsed_terms.append(self.resolve_term(cast(event_search.SearchFilter, item)))
+ resolved_term, resolved_context = self.resolve_term(
+ cast(event_search.SearchFilter, item)
+ )
+ parsed_terms.append(resolved_term)
+ resolved_contexts.append(resolved_context)
else:
if self.config.use_aggregate_conditions:
raise NotImplementedError("Can't filter on aggregates yet")
if len(parsed_terms) > 1:
- return TraceItemFilter(and_filter=AndFilter(filters=parsed_terms))
+ return TraceItemFilter(and_filter=AndFilter(filters=parsed_terms)), resolved_contexts
elif len(parsed_terms) == 1:
- return parsed_terms[0]
+ return parsed_terms[0], resolved_contexts
else:
- return None
+ return None, []
- def resolve_term(self, term: event_search.SearchFilter) -> TraceItemFilter:
+ def resolve_term(
+ self, term: event_search.SearchFilter
+ ) -> tuple[TraceItemFilter, VirtualColumnContext | None]:
resolved_column, context = self.resolve_column(term.key.name)
raw_value = term.value.raw_value
if term.value.is_wildcard():
@@ -251,12 +287,15 @@ def resolve_term(self, term: event_search.SearchFilter) -> TraceItemFilter:
else:
raise InvalidSearchQuery(f"Unknown operator: {term.operator}")
if isinstance(resolved_column.proto_definition, AttributeKey):
- return TraceItemFilter(
- comparison_filter=ComparisonFilter(
- key=resolved_column.proto_definition,
- op=operator,
- value=self._resolve_search_value(resolved_column, term.operator, raw_value),
- )
+ return (
+ TraceItemFilter(
+ comparison_filter=ComparisonFilter(
+ key=resolved_column.proto_definition,
+ op=operator,
+ value=self._resolve_search_value(resolved_column, term.operator, raw_value),
+ )
+ ),
+ context,
)
else:
raise NotImplementedError("Can't filter on aggregates yet")
@@ -325,16 +364,20 @@ def clean_contexts(
final_contexts.append(context)
return final_contexts
+ @sentry_sdk.trace
def resolve_columns(
self, selected_columns: list[str]
- ) -> tuple[list[ResolvedColumn | ResolvedFunction], list[VirtualColumnContext]]:
+ ) -> tuple[list[ResolvedColumn | ResolvedFunction], list[VirtualColumnContext | None]]:
"""Given a list of columns resolve them and get their context if applicable
This function will also dedupe the virtual column contexts if necessary
"""
+ span = sentry_sdk.get_current_span()
resolved_columns = []
resolved_contexts = []
stripped_columns = [column.strip() for column in selected_columns]
+ if span:
+ span.set_tag("SearchResolver.selected_columns", stripped_columns)
has_aggregates = False
for column in stripped_columns:
match = fields.is_function(column)
@@ -355,7 +398,7 @@ def resolve_columns(
resolved_columns.append(project_column)
resolved_contexts.append(project_context)
- return resolved_columns, self.clean_contexts(resolved_contexts)
+ return resolved_columns, resolved_contexts
def resolve_column(
self, column: str, match: Match | None = None
@@ -372,6 +415,7 @@ def get_field_type(self, column: str) -> str:
resolved_column, _ = self.resolve_column(column)
return resolved_column.search_type
+ @sentry_sdk.trace
def resolve_attributes(
self, columns: list[str]
) -> tuple[list[ResolvedColumn], list[VirtualColumnContext | None]]:
@@ -420,6 +464,9 @@ def resolve_attribute(self, column: str) -> tuple[ResolvedColumn, VirtualColumnC
if field_type not in constants.TYPE_MAP:
raise InvalidSearchQuery(f"Unsupported type {field_type} in {column}")
+ if column.startswith("sentry_tags"):
+ field = f"sentry.{field}"
+
search_type = cast(constants.SearchType, field_type)
column_definition = ResolvedColumn(
public_alias=column, internal_name=field, search_type=search_type
@@ -432,6 +479,7 @@ def resolve_attribute(self, column: str) -> tuple[ResolvedColumn, VirtualColumnC
else:
raise InvalidSearchQuery(f"Could not parse {column}")
+ @sentry_sdk.trace
def resolve_aggregates(
self, columns: list[str]
) -> tuple[list[ResolvedFunction], list[VirtualColumnContext | None]]:
diff --git a/src/sentry/search/eap/utils.py b/src/sentry/search/eap/utils.py
new file mode 100644
index 00000000000000..28619c2929f47c
--- /dev/null
+++ b/src/sentry/search/eap/utils.py
@@ -0,0 +1,17 @@
+from datetime import datetime
+
+from google.protobuf.timestamp_pb2 import Timestamp
+from sentry_protos.snuba.v1.endpoint_time_series_pb2 import TimeSeriesRequest
+
+
+def add_start_end_conditions(
+ in_msg: TimeSeriesRequest, start: datetime, end: datetime
+) -> TimeSeriesRequest:
+ start_time_proto = Timestamp()
+ start_time_proto.FromDatetime(start)
+ end_time_proto = Timestamp()
+ end_time_proto.FromDatetime(end)
+ in_msg.meta.start_timestamp.CopyFrom(start_time_proto)
+ in_msg.meta.end_timestamp.CopyFrom(end_time_proto)
+
+ return in_msg
diff --git a/src/sentry/search/events/builder/metrics.py b/src/sentry/search/events/builder/metrics.py
index d57893ae33b027..46c84384d8bcb8 100644
--- a/src/sentry/search/events/builder/metrics.py
+++ b/src/sentry/search/events/builder/metrics.py
@@ -1953,10 +1953,6 @@ def __init__(
[column for column in self.columns if column not in self.aggregates]
)
- @cached_property
- def non_aggregate_columns(self) -> list[str]:
- return list(set(self.original_selected_columns) - set(self.timeseries_columns))
-
@property
def translated_groupby(self) -> list[str]:
"""Get the names of the groupby columns to create the series names"""
diff --git a/src/sentry/search/events/builder/spans_indexed.py b/src/sentry/search/events/builder/spans_indexed.py
index 79a82d81f46a1e..3e603f55cf5802 100644
--- a/src/sentry/search/events/builder/spans_indexed.py
+++ b/src/sentry/search/events/builder/spans_indexed.py
@@ -71,6 +71,13 @@ class SpansEAPQueryBuilder(BaseQueryBuilder):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
+ def get_field_type(self, field: str) -> str | None:
+ tag_match = constants.TYPED_TAG_KEY_RE.search(field)
+ field_type = tag_match.group("type") if tag_match else None
+ if field_type == "number":
+ return "number"
+ return super().get_field_type(field)
+
def resolve_field(self, raw_field: str, alias: bool = False) -> Column:
# try the typed regex first
if len(raw_field) > constants.MAX_TAG_KEY_LENGTH:
diff --git a/src/sentry/search/events/builder/spans_metrics.py b/src/sentry/search/events/builder/spans_metrics.py
index fd9688c4528089..5ab6ea363afb26 100644
--- a/src/sentry/search/events/builder/spans_metrics.py
+++ b/src/sentry/search/events/builder/spans_metrics.py
@@ -8,6 +8,13 @@
)
from sentry.search.events.datasets.spans_metrics import SpansMetricsDatasetConfig
from sentry.search.events.types import SelectType
+from sentry.snuba.metrics.naming_layer.mri import parse_mri
+
+SIZE_FIELDS = {
+ "http.decoded_response_content_length": "byte",
+ "http.response_content_length": "byte",
+ "http.response_transfer_size": "byte",
+}
class SpansMetricsQueryBuilder(MetricsQueryBuilder):
@@ -15,6 +22,7 @@ class SpansMetricsQueryBuilder(MetricsQueryBuilder):
spans_metrics_builder = True
has_transaction = False
config_class = SpansMetricsDatasetConfig
+ size_fields = SIZE_FIELDS
column_remapping = {
# We want to remap `message` to `span.description` for the free
@@ -33,6 +41,15 @@ def get_field_type(self, field: str) -> str | None:
if field in ["span.duration", "span.self_time"]:
return "duration"
+ if unit := self.size_fields.get(field):
+ return unit
+
+ mri = constants.SPAN_METRICS_MAP.get(field)
+ if mri is not None:
+ parsed_mri = parse_mri(mri)
+ if parsed_mri is not None and parsed_mri.unit in constants.RESULT_TYPES:
+ return parsed_mri.unit
+
return None
def resolve_select(
diff --git a/src/sentry/search/events/datasets/discover.py b/src/sentry/search/events/datasets/discover.py
index 9178ae61de05cd..d909e4ca6723d3 100644
--- a/src/sentry/search/events/datasets/discover.py
+++ b/src/sentry/search/events/datasets/discover.py
@@ -993,14 +993,6 @@ def function_converter(self) -> Mapping[str, SnQLFunction]:
snql_aggregate=self._resolve_web_vital_score_function,
default_result_type="number",
),
- SnQLFunction(
- "weighted_performance_score",
- required_args=[
- NumericColumn("column"),
- ],
- snql_aggregate=self._resolve_weighted_web_vital_score_function,
- default_result_type="number",
- ),
SnQLFunction(
"opportunity_score",
required_args=[
@@ -1121,98 +1113,97 @@ def _resolve_http_status_code(self, _: str) -> SelectType:
@cached_property
def _resolve_project_threshold_config(self) -> SelectType:
- org_id = (
- self.builder.params.organization.id
- if self.builder.params.organization is not None
- else None
- )
+ project_thresholds = {}
+ project_threshold_config_keys = []
+ project_threshold_config_values = []
+
+ project_threshold_override_config_keys = []
+ project_threshold_override_config_values = []
+
+ org_id = self.builder.params.organization_id
project_ids = self.builder.params.project_ids
- project_threshold_configs = (
- ProjectTransactionThreshold.objects.filter(
- organization_id=org_id,
- project_id__in=project_ids,
+ if org_id is not None:
+ project_threshold_configs = (
+ ProjectTransactionThreshold.objects.filter(
+ organization_id=org_id,
+ project_id__in=project_ids,
+ )
+ .order_by("project_id")
+ .values_list("project_id", "threshold", "metric")
)
- .order_by("project_id")
- .values_list("project_id", "threshold", "metric")
- )
- transaction_threshold_configs = (
- ProjectTransactionThresholdOverride.objects.filter(
- organization_id=org_id,
- project_id__in=project_ids,
+ transaction_threshold_configs = (
+ ProjectTransactionThresholdOverride.objects.filter(
+ organization_id=org_id,
+ project_id__in=project_ids,
+ )
+ .order_by("project_id")
+ .values_list("transaction", "project_id", "threshold", "metric")
)
- .order_by("project_id")
- .values_list("transaction", "project_id", "threshold", "metric")
- )
- num_project_thresholds = project_threshold_configs.count()
- sentry_sdk.set_tag("project_threshold.count", num_project_thresholds)
- sentry_sdk.set_tag(
- "project_threshold.count.grouped",
- format_grouped_length(num_project_thresholds, [10, 100, 250, 500]),
- )
-
- num_transaction_thresholds = transaction_threshold_configs.count()
- sentry_sdk.set_tag("txn_threshold.count", num_transaction_thresholds)
- sentry_sdk.set_tag(
- "txn_threshold.count.grouped",
- format_grouped_length(num_transaction_thresholds, [10, 100, 250, 500]),
- )
-
- if (
- num_project_thresholds + num_transaction_thresholds
- > MAX_QUERYABLE_TRANSACTION_THRESHOLDS
- ):
- raise InvalidSearchQuery(
- f"Exceeded {MAX_QUERYABLE_TRANSACTION_THRESHOLDS} configured transaction thresholds limit, try with fewer Projects."
+ num_project_thresholds = project_threshold_configs.count()
+ sentry_sdk.set_tag("project_threshold.count", num_project_thresholds)
+ sentry_sdk.set_tag(
+ "project_threshold.count.grouped",
+ format_grouped_length(num_project_thresholds, [10, 100, 250, 500]),
)
- # Arrays need to have toUint64 casting because clickhouse will define the type as the narrowest possible type
- # that can store listed argument types, which means the comparison will fail because of mismatched types
- project_thresholds = {}
- project_threshold_config_keys = []
- project_threshold_config_values = []
- for project_id, threshold, metric in project_threshold_configs:
- metric = TRANSACTION_METRICS[metric]
- if (
- threshold == DEFAULT_PROJECT_THRESHOLD
- and metric == DEFAULT_PROJECT_THRESHOLD_METRIC
- ):
- # small optimization, if the configuration is equal to the default,
- # we can skip it in the final query
- continue
-
- project_thresholds[project_id] = (metric, threshold)
- project_threshold_config_keys.append(Function("toUInt64", [project_id]))
- project_threshold_config_values.append((metric, threshold))
+ num_transaction_thresholds = transaction_threshold_configs.count()
+ sentry_sdk.set_tag("txn_threshold.count", num_transaction_thresholds)
+ sentry_sdk.set_tag(
+ "txn_threshold.count.grouped",
+ format_grouped_length(num_transaction_thresholds, [10, 100, 250, 500]),
+ )
- project_threshold_override_config_keys = []
- project_threshold_override_config_values = []
- for transaction, project_id, threshold, metric in transaction_threshold_configs:
- metric = TRANSACTION_METRICS[metric]
if (
- project_id in project_thresholds
- and threshold == project_thresholds[project_id][1]
- and metric == project_thresholds[project_id][0]
+ num_project_thresholds + num_transaction_thresholds
+ > MAX_QUERYABLE_TRANSACTION_THRESHOLDS
):
- # small optimization, if the configuration is equal to the project
- # configs, we can skip it in the final query
- continue
-
- elif (
- project_id not in project_thresholds
- and threshold == DEFAULT_PROJECT_THRESHOLD
- and metric == DEFAULT_PROJECT_THRESHOLD_METRIC
- ):
- # small optimization, if the configuration is equal to the default
- # and no project configs were set, we can skip it in the final query
- continue
+ raise InvalidSearchQuery(
+ f"Exceeded {MAX_QUERYABLE_TRANSACTION_THRESHOLDS} configured transaction thresholds limit, try with fewer Projects."
+ )
- project_threshold_override_config_keys.append(
- (Function("toUInt64", [project_id]), transaction)
- )
- project_threshold_override_config_values.append((metric, threshold))
+ # Arrays need to have toUint64 casting because clickhouse will define the type as the narrowest possible type
+ # that can store listed argument types, which means the comparison will fail because of mismatched types
+ for project_id, threshold, metric in project_threshold_configs:
+ metric_name = TRANSACTION_METRICS[metric]
+ if (
+ threshold == DEFAULT_PROJECT_THRESHOLD
+ and metric_name == DEFAULT_PROJECT_THRESHOLD_METRIC
+ ):
+ # small optimization, if the configuration is equal to the default,
+ # we can skip it in the final query
+ continue
+
+ project_thresholds[project_id] = (metric_name, threshold)
+ project_threshold_config_keys.append(Function("toUInt64", [project_id]))
+ project_threshold_config_values.append((metric_name, threshold))
+
+ for transaction, project_id, threshold, metric in transaction_threshold_configs:
+ metric_name = TRANSACTION_METRICS[metric]
+ if (
+ project_id in project_thresholds
+ and threshold == project_thresholds[project_id][1]
+ and metric_name == project_thresholds[project_id][0]
+ ):
+ # small optimization, if the configuration is equal to the project
+ # configs, we can skip it in the final query
+ continue
+
+ elif (
+ project_id not in project_thresholds
+ and threshold == DEFAULT_PROJECT_THRESHOLD
+ and metric_name == DEFAULT_PROJECT_THRESHOLD_METRIC
+ ):
+ # small optimization, if the configuration is equal to the default
+ # and no project configs were set, we can skip it in the final query
+ continue
+
+ project_threshold_override_config_keys.append(
+ (Function("toUInt64", [project_id]), transaction)
+ )
+ project_threshold_override_config_values.append((metric_name, threshold))
project_threshold_config_index: SelectType = Function(
"indexOf",
@@ -1710,58 +1701,6 @@ def _resolve_web_vital_score_function(
alias,
)
- def _resolve_weighted_web_vital_score_function(
- self,
- args: Mapping[str, Column],
- alias: str,
- ) -> SelectType:
- column = args["column"]
- if column.key not in [
- "score.lcp",
- "score.fcp",
- "score.fid",
- "score.cls",
- "score.ttfb",
- ]:
- raise InvalidSearchQuery(
- "weighted_performance_score only supports performance score measurements"
- )
- total_score_column = self.builder.column("measurements.score.total")
- return Function(
- "greatest",
- [
- Function(
- "least",
- [
- Function(
- "divide",
- [
- Function(
- "sum",
- [column],
- ),
- Function(
- "countIf",
- [
- Function(
- "greaterOrEquals",
- [
- total_score_column,
- 0,
- ],
- )
- ],
- ),
- ],
- ),
- 1.0,
- ],
- ),
- 0.0,
- ],
- alias,
- )
-
def _resolve_web_vital_opportunity_score_function(
self,
args: Mapping[str, Column],
@@ -1777,7 +1716,7 @@ def _resolve_web_vital_opportunity_score_function(
"score.total",
]:
raise InvalidSearchQuery(
- "weighted_performance_score only supports performance score measurements"
+ "opportunity_score only supports performance score measurements"
)
weight_column = (
@@ -1867,7 +1806,7 @@ def _issue_filter_converter(self, search_filter: SearchFilter) -> WhereType | No
value = to_list(search_filter.value.value)
# `unknown` is a special value for when there is no issue associated with the event
group_short_ids = [v for v in value if v and v != "unknown"]
- general_group_filter_values = ["" for v in value if not v or v == "unknown"]
+ general_group_filter_values = [0 for v in value if not v or v == "unknown"]
if group_short_ids and self.builder.params.organization is not None:
try:
diff --git a/src/sentry/search/events/datasets/function_aliases.py b/src/sentry/search/events/datasets/function_aliases.py
index 071cccd476b759..70480101f3b9f6 100644
--- a/src/sentry/search/events/datasets/function_aliases.py
+++ b/src/sentry/search/events/datasets/function_aliases.py
@@ -12,19 +12,16 @@
from sentry.search.events import constants
from sentry.search.events.builder.base import BaseQueryBuilder
from sentry.search.events.types import SelectType
-from sentry.sentry_metrics.configuration import UseCaseKey
-from sentry.sentry_metrics.use_case_id_registry import UseCaseID
from sentry.utils.hashlib import fnv1a_32
def resolve_project_threshold_config(
# See resolve_tag_value signature
- tag_value_resolver: Callable[[UseCaseID | UseCaseKey, int, str], int | str | None],
+ tag_value_resolver: Callable[[int, str], int | str | None],
# See resolve_tag_key signature
- column_name_resolver: Callable[[UseCaseID | UseCaseKey, int, str], str],
+ column_name_resolver: Callable[[int, str], str],
project_ids: Sequence[int],
org_id: int,
- use_case_id: UseCaseID | None = None,
) -> SelectType:
"""
Shared function that resolves the project threshold configuration used by both snuba/metrics
@@ -89,7 +86,7 @@ def resolve_project_threshold_config(
# and no project configs were set, we can skip it in the final query
continue
- transaction_id = tag_value_resolver(use_case_id, org_id, transaction)
+ transaction_id = tag_value_resolver(org_id, transaction)
# Don't add to the config if we can't resolve it
if transaction_id is None:
continue
@@ -116,7 +113,7 @@ def resolve_project_threshold_config(
project_threshold_override_config_keys,
(
Column(name="project_id"),
- Column(name=column_name_resolver(use_case_id, org_id, "transaction")),
+ Column(name=column_name_resolver(org_id, "transaction")),
),
],
constants.PROJECT_THRESHOLD_OVERRIDE_CONFIG_INDEX_ALIAS,
@@ -183,7 +180,7 @@ def resolve_metrics_percentile(
fixed_percentile: float | None = None,
extra_conditions: list[Function] | None = None,
) -> SelectType:
- if fixed_percentile is None:
+ if fixed_percentile is None and isinstance(args["percentile"], float):
fixed_percentile = args["percentile"]
if fixed_percentile not in constants.METRIC_PERCENTILES:
raise IncompatibleMetricsQuery("Custom quantile incompatible with metrics")
@@ -242,6 +239,10 @@ def resolve_avg_compare_if(
alias: str | None,
) -> SelectType:
"""Helper function for avg compare"""
+ if not isinstance(args["comparison_column"], str):
+ raise InvalidSearchQuery(
+ f"Invalid column type: expected str, got {args['comparison_column']}"
+ )
return Function(
"avgIf",
[
@@ -280,10 +281,12 @@ def resolve_metrics_layer_percentile(
fixed_percentile: float | None = None,
) -> SelectType:
# TODO: rename to just resolve_metrics_percentile once the non layer code can be retired
- if fixed_percentile is None:
+ if fixed_percentile is None and isinstance(args["percentile"], float):
fixed_percentile = args["percentile"]
if fixed_percentile not in constants.METRIC_PERCENTILES:
raise IncompatibleMetricsQuery("Custom quantile incompatible with metrics")
+ if not isinstance(args["column"], str):
+ raise InvalidSearchQuery(f"Invalid column type: expected str, got {args['column']}")
column = resolve_mri(args["column"])
return (
Function(
@@ -305,7 +308,7 @@ def resolve_metrics_layer_percentile(
def resolve_division(
- dividend: SelectType, divisor: SelectType, alias: str, fallback: Any | None = None
+ dividend: SelectType, divisor: SelectType, alias: str | None, fallback: Any | None = None
) -> SelectType:
return Function(
"if",
diff --git a/src/sentry/search/events/datasets/metrics.py b/src/sentry/search/events/datasets/metrics.py
index 54e02d63f41e86..faf147d5a023a9 100644
--- a/src/sentry/search/events/datasets/metrics.py
+++ b/src/sentry/search/events/datasets/metrics.py
@@ -1,10 +1,11 @@
from __future__ import annotations
-from collections.abc import Callable, Mapping
+from collections.abc import Callable, Mapping, MutableMapping
from django.utils.functional import cached_property
from snuba_sdk import Column, Condition, Function, Op, OrderBy
+from sentry import features
from sentry.api.event_search import SearchFilter
from sentry.exceptions import IncompatibleMetricsQuery, InvalidSearchQuery
from sentry.search.events import constants, fields
@@ -80,11 +81,6 @@ def resolve_metric(self, value: str) -> int:
self.builder.metric_ids.add(metric_id)
return metric_id
- def resolve_value(self, value: str) -> int:
- value_id = self.builder.resolve_tag_value(value)
-
- return value_id
-
@property
def should_skip_interval_calculation(self):
return self.builder.builder_config.skip_time_conditions and (
@@ -648,26 +644,6 @@ def function_converter(self) -> Mapping[str, fields.MetricsFunction]:
snql_distribution=self._resolve_web_vital_score_function,
default_result_type="number",
),
- fields.MetricsFunction(
- "weighted_performance_score",
- required_args=[
- fields.MetricArg(
- "column",
- allowed_columns=[
- "measurements.score.fcp",
- "measurements.score.lcp",
- "measurements.score.fid",
- "measurements.score.inp",
- "measurements.score.cls",
- "measurements.score.ttfb",
- ],
- allow_custom_measurements=False,
- )
- ],
- calculated_args=[resolve_metric_id],
- snql_distribution=self._resolve_weighted_web_vital_score_function,
- default_result_type="number",
- ),
fields.MetricsFunction(
"opportunity_score",
required_args=[
@@ -1005,16 +981,13 @@ def _resolve_transaction_alias_on_demand(self, _: str) -> SelectType:
@cached_property
def _resolve_project_threshold_config(self) -> SelectType:
+ org_id = self.builder.params.organization_id
+ if org_id is None:
+ raise InvalidSearchQuery("Missing organization")
return function_aliases.resolve_project_threshold_config(
- tag_value_resolver=lambda _use_case_id, _org_id, value: self.builder.resolve_tag_value(
- value
- ),
- column_name_resolver=lambda _use_case_id, _org_id, value: self.builder.resolve_column_name(
- value
- ),
- org_id=(
- self.builder.params.organization.id if self.builder.params.organization else None
- ),
+ tag_value_resolver=lambda _org_id, value: self.builder.resolve_tag_value(value),
+ column_name_resolver=lambda _org_id, value: self.builder.resolve_column_name(value),
+ org_id=org_id,
project_ids=self.builder.params.project_ids,
)
@@ -1073,17 +1046,18 @@ def _transaction_filter_converter(self, search_filter: SearchFilter) -> WhereTyp
return None
if isinstance(value, list):
- resolved_value = []
+ resolved_values = []
for item in value:
resolved_item = self.builder.resolve_tag_value(item)
if resolved_item is None:
raise IncompatibleMetricsQuery(f"Transaction value {item} in filter not found")
- resolved_value.append(resolved_item)
+ resolved_values.append(resolved_item)
+ value = resolved_values
else:
resolved_value = self.builder.resolve_tag_value(value)
if resolved_value is None:
raise IncompatibleMetricsQuery(f"Transaction value {value} in filter not found")
- value = resolved_value
+ value = resolved_value
if search_filter.value.is_wildcard():
return Condition(
@@ -1260,8 +1234,9 @@ def _resolve_histogram_function(
buckets"""
zoom_params = getattr(self.builder, "zoom_params", None)
num_buckets = getattr(self.builder, "num_buckets", 250)
+ histogram_aliases = getattr(self.builder, "histogram_aliases", [])
+ histogram_aliases.append(alias)
metric_condition = Function("equals", [Column("metric_id"), args["metric_id"]])
- self.builder.histogram_aliases.append(alias)
return Function(
f"histogramIf({num_buckets})",
[
@@ -1326,6 +1301,8 @@ def _resolve_user_misery_function(
args: Mapping[str, str | Column | SelectType | int | float],
alias: str | None = None,
) -> SelectType:
+ if not isinstance(args["alpha"], float) or not isinstance(args["beta"], float):
+ raise InvalidSearchQuery("Cannot query user_misery with non floating point alpha/beta")
if args["satisfaction"] is not None:
raise IncompatibleMetricsQuery(
"Cannot query user_misery with a threshold parameter on the metrics dataset"
@@ -1493,9 +1470,13 @@ def _resolve_web_vital_function(
) -> SelectType:
column = args["column"]
metric_id = args["metric_id"]
- quality = args["quality"].lower()
+ quality = args["quality"]
- if column not in [
+ if not isinstance(quality, str):
+ raise InvalidSearchQuery(f"Invalid argument quality: {quality}")
+ quality = quality.lower()
+
+ if not isinstance(column, str) or column not in [
"measurements.lcp",
"measurements.fcp",
"measurements.fp",
@@ -1549,12 +1530,18 @@ def _resolve_web_vital_function(
def _resolve_web_vital_score_function(
self,
args: Mapping[str, str | Column | SelectType | int | float],
- alias: str,
+ alias: str | None,
) -> SelectType:
+ """Returns the normalized score (0.0-1.0) for a given web vital.
+ This function exists because we don't store a metric for the normalized score.
+ The normalized score is calculated by dividing the sum of measurements.score.* by the sum of measurements.score.weight.*
+
+ To calculate the total performance score, see _resolve_total_performance_score_function.
+ """
column = args["column"]
metric_id = args["metric_id"]
- if column not in [
+ if not isinstance(column, str) or column not in [
"measurements.score.lcp",
"measurements.score.fcp",
"measurements.score.fid",
@@ -1630,115 +1617,6 @@ def _resolve_web_vital_score_function(
alias,
)
- def _resolve_weighted_web_vital_score_function(
- self,
- args: Mapping[str, str | Column | SelectType | int | float],
- alias: str,
- ) -> SelectType:
- column = args["column"]
- metric_id = args["metric_id"]
-
- if column not in [
- "measurements.score.lcp",
- "measurements.score.fcp",
- "measurements.score.fid",
- "measurements.score.inp",
- "measurements.score.cls",
- "measurements.score.ttfb",
- ]:
- raise InvalidSearchQuery("performance_score only supports measurements")
-
- return Function(
- "greatest",
- [
- Function(
- "least",
- [
- Function(
- "if",
- [
- Function(
- "and",
- [
- Function(
- "greater",
- [
- Function(
- "sumIf",
- [
- Column("value"),
- Function(
- "equals",
- [Column("metric_id"), metric_id],
- ),
- ],
- ),
- 0,
- ],
- ),
- Function(
- "greater",
- [
- Function(
- "countIf",
- [
- Column("value"),
- Function(
- "equals",
- [
- Column("metric_id"),
- self.resolve_metric(
- "measurements.score.total"
- ),
- ],
- ),
- ],
- ),
- 0,
- ],
- ),
- ],
- ),
- Function(
- "divide",
- [
- Function(
- "sumIf",
- [
- Column("value"),
- Function(
- "equals", [Column("metric_id"), metric_id]
- ),
- ],
- ),
- Function(
- "countIf",
- [
- Column("value"),
- Function(
- "equals",
- [
- Column("metric_id"),
- self.resolve_metric(
- "measurements.score.total"
- ),
- ],
- ),
- ],
- ),
- ],
- ),
- 0.0,
- ],
- ),
- 1.0,
- ],
- ),
- 0.0,
- ],
- alias,
- )
-
def _resolve_web_vital_opportunity_score_function(
self,
args: Mapping[str, str | Column | SelectType | int | float],
@@ -1747,7 +1625,7 @@ def _resolve_web_vital_opportunity_score_function(
column = args["column"]
metric_id = args["metric_id"]
- if column not in [
+ if not isinstance(column, str) or column not in [
"measurements.score.lcp",
"measurements.score.fcp",
"measurements.score.fid",
@@ -1903,7 +1781,7 @@ def _resolve_total_web_vital_opportunity_score_with_fixed_weights_function(
alias,
)
- def _resolve_total_score_weights_function(self, column: str, alias: str) -> SelectType:
+ def _resolve_total_score_weights_function(self, column: str, alias: str | None) -> SelectType:
"""Calculates the total sum score weights for a given web vital.
This must be cached since it runs another query."""
@@ -1962,8 +1840,16 @@ def _resolve_count_scores_function(
def _resolve_total_performance_score_function(
self,
_: Mapping[str, str | Column | SelectType | int | float],
- alias: str,
+ alias: str | None,
) -> SelectType:
+ """Returns the total performance score based on a page/site's web vitals.
+ This function is calculated by:
+ the summation of (normalized_vital_score * weight) for each vital, divided by the sum of all weights
+ - normalized_vital_score is the 0.0-1.0 score for each individual vital
+ - weight is the 0.0-1.0 weight for each individual vital (this is a constant value stored in constants.WEB_VITALS_PERFORMANCE_SCORE_WEIGHTS)
+ - if all webvitals have data, then the sum of all weights is 1
+ - normalized_vital_score is obtained through _resolve_web_vital_score_function (see docstring on that function for more details)
+ """
vitals = ["lcp", "fcp", "cls", "ttfb", "inp"]
scores = {
vital: Function(
@@ -1982,9 +1868,38 @@ def _resolve_total_performance_score_function(
for vital in vitals
}
+ weights = {
+ vital: Function(
+ "if",
+ [
+ Function(
+ "isZeroOrNull",
+ [
+ Function(
+ "countIf",
+ [
+ Column("value"),
+ Function(
+ "equals",
+ [
+ Column("metric_id"),
+ self.resolve_metric(f"measurements.score.{vital}"),
+ ],
+ ),
+ ],
+ ),
+ ],
+ ),
+ 0,
+ constants.WEB_VITALS_PERFORMANCE_SCORE_WEIGHTS[vital],
+ ],
+ )
+ for vital in vitals
+ }
+
# TODO: Is there a way to sum more than 2 values at once?
return Function(
- "plus",
+ "divide",
[
Function(
"plus",
@@ -1995,17 +1910,54 @@ def _resolve_total_performance_score_function(
Function(
"plus",
[
- scores["lcp"],
- scores["fcp"],
+ Function(
+ "plus",
+ [
+ scores["lcp"],
+ scores["fcp"],
+ ],
+ ),
+ scores["cls"],
],
),
- scores["cls"],
+ scores["ttfb"],
],
),
- scores["ttfb"],
+ scores["inp"],
],
),
- scores["inp"],
+ (
+ Function(
+ "plus",
+ [
+ Function(
+ "plus",
+ [
+ Function(
+ "plus",
+ [
+ Function(
+ "plus",
+ [
+ weights["lcp"],
+ weights["fcp"],
+ ],
+ ),
+ weights["cls"],
+ ],
+ ),
+ weights["ttfb"],
+ ],
+ ),
+ weights["inp"],
+ ],
+ )
+ if features.has(
+ "organizations:performance-vitals-handle-missing-webvitals",
+ self.builder.params.organization,
+ )
+ else 1
+ ),
],
alias,
)
@@ -2044,6 +1996,8 @@ def _resolve_total_transaction_duration(self, alias: str, scope: str) -> SelectT
def _resolve_time_spent_percentage(
self, args: Mapping[str, str | Column | SelectType | int | float], alias: str
) -> SelectType:
+ if not isinstance(args["scope"], str):
+ raise InvalidSearchQuery(f"Invalid scope: {args['scope']}")
total_time = self._resolve_total_transaction_duration(
constants.TOTAL_TRANSACTION_DURATION_ALIAS, args["scope"]
)
@@ -2066,7 +2020,7 @@ def _resolve_time_spent_percentage(
def _resolve_epm(
self,
- args: Mapping[str, str | Column | SelectType | int | float],
+ args: MutableMapping[str, str | Column | SelectType | int | float],
alias: str | None = None,
extra_condition: Function | None = None,
) -> SelectType:
@@ -2076,7 +2030,7 @@ def _resolve_epm(
def _resolve_spm(
self,
- args: Mapping[str, str | Column | SelectType | int | float],
+ args: MutableMapping[str, str | Column | SelectType | int | float],
alias: str | None = None,
extra_condition: Function | None = None,
) -> SelectType:
@@ -2086,7 +2040,7 @@ def _resolve_spm(
def _resolve_eps(
self,
- args: Mapping[str, str | Column | SelectType | int | float],
+ args: MutableMapping[str, str | Column | SelectType | int | float],
alias: str | None = None,
extra_condition: Function | None = None,
) -> SelectType:
@@ -2100,7 +2054,7 @@ def _resolve_rate(
args: Mapping[str, str | Column | SelectType | int | float],
alias: str | None = None,
extra_condition: Function | None = None,
- metric: str | None = "transaction.duration",
+ metric: str = "transaction.duration",
) -> SelectType:
base_condition = Function(
"equals",
diff --git a/src/sentry/search/events/datasets/metrics_summaries.py b/src/sentry/search/events/datasets/metrics_summaries.py
deleted file mode 100644
index dd1076fa8ff8e4..00000000000000
--- a/src/sentry/search/events/datasets/metrics_summaries.py
+++ /dev/null
@@ -1,120 +0,0 @@
-from __future__ import annotations
-
-from collections.abc import Callable, Mapping
-
-from snuba_sdk import And, Column, Condition, Direction, Function, Op, OrderBy
-
-from sentry.api.event_search import SearchFilter
-from sentry.search.events import constants
-from sentry.search.events.builder.base import BaseQueryBuilder
-from sentry.search.events.datasets import field_aliases, filter_aliases, function_aliases
-from sentry.search.events.datasets.base import DatasetConfig
-from sentry.search.events.fields import IntervalDefault, NumberRange, SnQLFunction, with_default
-from sentry.search.events.types import SelectType, WhereType
-
-
-class MetricsSummariesDatasetConfig(DatasetConfig):
- def __init__(self, builder: BaseQueryBuilder):
- self.builder = builder
-
- @property
- def search_filter_converter(
- self,
- ) -> Mapping[str, Callable[[SearchFilter], WhereType | None]]:
- return {
- constants.PROJECT_ALIAS: self._project_slug_filter_converter,
- constants.PROJECT_NAME_ALIAS: self._project_slug_filter_converter,
- "metric": self._metric_filter_converter,
- }
-
- @property
- def field_alias_converter(self) -> Mapping[str, Callable[[str], SelectType]]:
- return {
- constants.PROJECT_ALIAS: self._resolve_project_slug_alias,
- constants.PROJECT_NAME_ALIAS: self._resolve_project_slug_alias,
- "avg_metric": self._resolve_avg_alias,
- }
-
- @property
- def function_converter(self) -> Mapping[str, SnQLFunction]:
- return {
- function.name: function
- for function in [
- SnQLFunction(
- "examples",
- snql_aggregate=self._resolve_random_samples,
- optional_args=[with_default(1, NumberRange("count", 1, None))],
- private=True,
- ),
- SnQLFunction(
- "rounded_timestamp",
- required_args=[IntervalDefault("interval", 1, None)],
- snql_column=lambda args, alias: function_aliases.resolve_rounded_timestamp(
- args["interval"], alias, timestamp_column="end_timestamp"
- ),
- private=True,
- ),
- ]
- }
-
- @property
- def orderby_converter(self) -> Mapping[str, Callable[[Direction], OrderBy]]:
- return {}
-
- def _project_slug_filter_converter(self, search_filter: SearchFilter) -> WhereType | None:
- return filter_aliases.project_slug_converter(self.builder, search_filter)
-
- def _metric_filter_converter(self, search_filter: SearchFilter) -> WhereType | None:
- column = search_filter.key.name
- value = search_filter.value.value
- return And(
- [
- Condition(self.builder.column(column), Op.EQ, value),
- # The metrics summaries table orders by the cityHash64 of the metric name.
- # In order to take full advantage of the order by of the table, add an
- # additional condition on the cityHash64 of the metric name.
- Condition(
- Function("cityHash64", [self.builder.column(column)]),
- Op.EQ,
- Function("cityHash64", [value]),
- ),
- ]
- )
-
- def _resolve_project_slug_alias(self, alias: str) -> SelectType:
- return field_aliases.resolve_project_slug_alias(self.builder, alias)
-
- def _resolve_avg_alias(self, alias: str) -> SelectType:
- return Function(
- "divide",
- [self.builder.column("sum_metric"), self.builder.column("count_metric")],
- alias,
- )
-
- def _resolve_random_samples(
- self,
- args: Mapping[str, str | Column | SelectType | int | float],
- alias: str,
- ) -> SelectType:
- offset = 0 if self.builder.offset is None else self.builder.offset.offset
- limit = 0 if self.builder.limit is None else self.builder.limit.limit
- return function_aliases.resolve_random_samples(
- [
- # DO NOT change the order of these columns as it
- # changes the order of the tuple in the response
- # which WILL cause errors where it assumes this
- # order
- self.builder.resolve_column("span.group"),
- self.builder.resolve_column("timestamp"),
- self.builder.resolve_column("id"),
- self.builder.resolve_column("min_metric"),
- self.builder.resolve_column("max_metric"),
- self.builder.resolve_column("sum_metric"),
- self.builder.resolve_column("count_metric"),
- self.builder.resolve_column("avg_metric"),
- ],
- alias,
- offset,
- limit,
- size=int(args["count"]),
- )
diff --git a/src/sentry/search/events/datasets/spans_metrics.py b/src/sentry/search/events/datasets/spans_metrics.py
index 358dab0107a887..5713605e3b93d0 100644
--- a/src/sentry/search/events/datasets/spans_metrics.py
+++ b/src/sentry/search/events/datasets/spans_metrics.py
@@ -5,7 +5,7 @@
from typing import TypedDict
import sentry_sdk
-from snuba_sdk import AliasedExpression, Column, Condition, Function, Identifier, Op, OrderBy
+from snuba_sdk import Column, Condition, Function, Identifier, Op, OrderBy
from sentry.api.event_search import SearchFilter
from sentry.exceptions import IncompatibleMetricsQuery, InvalidSearchQuery
@@ -16,7 +16,6 @@
from sentry.search.events.fields import SnQLStringArg, get_function_alias
from sentry.search.events.types import SelectType, WhereType
from sentry.search.utils import DEVICE_CLASS
-from sentry.snuba.metrics.naming_layer.mri import SpanMRI
from sentry.snuba.referrer import Referrer
@@ -1362,278 +1361,3 @@ def _resolve_trace_error_count(
@property
def orderby_converter(self) -> Mapping[str, OrderBy]:
return {}
-
-
-class SpansMetricsLayerDatasetConfig(DatasetConfig):
- missing_function_error = IncompatibleMetricsQuery
-
- def __init__(self, builder: spans_metrics.SpansMetricsQueryBuilder):
- self.builder = builder
- self.total_span_duration: float | None = None
-
- def resolve_mri(self, value: str) -> Column:
- """Given the public facing column name resolve it to the MRI and return a Column"""
- # If the query builder has not detected a transaction use the light self time metric to get a performance boost
- if value == "span.self_time" and not self.builder.has_transaction:
- return Column(constants.SELF_TIME_LIGHT)
- else:
- return Column(constants.SPAN_METRICS_MAP[value])
-
- @property
- def search_filter_converter(
- self,
- ) -> Mapping[str, Callable[[SearchFilter], WhereType | None]]:
- return {}
-
- @property
- def field_alias_converter(self) -> Mapping[str, Callable[[str], SelectType]]:
- return {
- constants.SPAN_MODULE_ALIAS: lambda alias: field_aliases.resolve_span_module(
- self.builder, alias
- )
- }
-
- @property
- def function_converter(self) -> Mapping[str, fields.MetricsFunction]:
- """Make sure to update METRIC_FUNCTION_LIST_BY_TYPE when adding functions here, can't be a dynamic list since
- the Metric Layer will actually handle which dataset each function goes to
- """
-
- function_converter = {
- function.name: function
- for function in [
- fields.MetricsFunction(
- "count_unique",
- required_args=[
- fields.MetricArg(
- "column",
- allowed_columns=["user"],
- allow_custom_measurements=False,
- )
- ],
- snql_metric_layer=lambda args, alias: Function(
- "count_unique",
- [self.resolve_mri("user")],
- alias,
- ),
- default_result_type="integer",
- ),
- fields.MetricsFunction(
- "epm",
- snql_metric_layer=lambda args, alias: Function(
- "rate",
- [
- self.resolve_mri("span.self_time"),
- args["interval"],
- 60,
- ],
- alias,
- ),
- optional_args=[fields.IntervalDefault("interval", 1, None)],
- default_result_type="rate",
- ),
- fields.MetricsFunction(
- "eps",
- snql_metric_layer=lambda args, alias: Function(
- "rate",
- [
- self.resolve_mri("span.self_time"),
- args["interval"],
- 1,
- ],
- alias,
- ),
- optional_args=[fields.IntervalDefault("interval", 1, None)],
- default_result_type="rate",
- ),
- fields.MetricsFunction(
- "count",
- snql_metric_layer=lambda args, alias: Function(
- "count",
- [
- self.resolve_mri("span.self_time"),
- ],
- alias,
- ),
- default_result_type="integer",
- ),
- fields.MetricsFunction(
- "sum",
- optional_args=[
- fields.with_default(
- "span.self_time",
- fields.MetricArg(
- "column",
- allowed_columns=constants.SPAN_METRIC_SUMMABLE_COLUMNS,
- allow_custom_measurements=False,
- ),
- ),
- ],
- snql_metric_layer=lambda args, alias: Function(
- "sum",
- [self.resolve_mri(args["column"])],
- alias,
- ),
- default_result_type="duration",
- ),
- fields.MetricsFunction(
- "avg",
- optional_args=[
- fields.with_default(
- "span.self_time",
- fields.MetricArg(
- "column",
- allowed_columns=constants.SPAN_METRIC_DURATION_COLUMNS.union(
- constants.SPAN_METRIC_BYTES_COLUMNS
- ),
- ),
- ),
- ],
- snql_metric_layer=lambda args, alias: Function(
- "avg",
- [self.resolve_mri(args["column"])],
- alias,
- ),
- result_type_fn=self.reflective_result_type(),
- default_result_type="duration",
- ),
- fields.MetricsFunction(
- "percentile",
- required_args=[
- fields.with_default(
- "span.self_time",
- fields.MetricArg(
- "column", allowed_columns=constants.SPAN_METRIC_DURATION_COLUMNS
- ),
- ),
- fields.NumberRange("percentile", 0, 1),
- ],
- snql_metric_layer=lambda args, alias: function_aliases.resolve_metrics_layer_percentile(
- args,
- alias,
- self.resolve_mri,
- ),
- result_type_fn=self.reflective_result_type(),
- default_result_type="duration",
- ),
- fields.MetricsFunction(
- "p50",
- optional_args=[
- fields.with_default(
- "span.self_time",
- fields.MetricArg(
- "column",
- allowed_columns=constants.SPAN_METRIC_DURATION_COLUMNS,
- allow_custom_measurements=False,
- ),
- ),
- ],
- snql_metric_layer=lambda args, alias: function_aliases.resolve_metrics_layer_percentile(
- args=args, alias=alias, resolve_mri=self.resolve_mri, fixed_percentile=0.50
- ),
- default_result_type="duration",
- ),
- fields.MetricsFunction(
- "p75",
- optional_args=[
- fields.with_default(
- "span.self_time",
- fields.MetricArg(
- "column",
- allowed_columns=constants.SPAN_METRIC_DURATION_COLUMNS,
- allow_custom_measurements=False,
- ),
- ),
- ],
- snql_metric_layer=lambda args, alias: function_aliases.resolve_metrics_layer_percentile(
- args=args, alias=alias, resolve_mri=self.resolve_mri, fixed_percentile=0.75
- ),
- default_result_type="duration",
- ),
- fields.MetricsFunction(
- "p95",
- optional_args=[
- fields.with_default(
- "span.self_time",
- fields.MetricArg(
- "column",
- allowed_columns=constants.SPAN_METRIC_DURATION_COLUMNS,
- allow_custom_measurements=False,
- ),
- ),
- ],
- snql_metric_layer=lambda args, alias: function_aliases.resolve_metrics_layer_percentile(
- args=args, alias=alias, resolve_mri=self.resolve_mri, fixed_percentile=0.95
- ),
- default_result_type="duration",
- ),
- fields.MetricsFunction(
- "p99",
- optional_args=[
- fields.with_default(
- "span.self_time",
- fields.MetricArg(
- "column",
- allowed_columns=constants.SPAN_METRIC_DURATION_COLUMNS,
- allow_custom_measurements=False,
- ),
- ),
- ],
- snql_metric_layer=lambda args, alias: function_aliases.resolve_metrics_layer_percentile(
- args=args, alias=alias, resolve_mri=self.resolve_mri, fixed_percentile=0.99
- ),
- default_result_type="duration",
- ),
- fields.MetricsFunction(
- "p100",
- optional_args=[
- fields.with_default(
- "span.self_time",
- fields.MetricArg(
- "column",
- allowed_columns=constants.SPAN_METRIC_DURATION_COLUMNS,
- allow_custom_measurements=False,
- ),
- ),
- ],
- snql_metric_layer=lambda args, alias: function_aliases.resolve_metrics_layer_percentile(
- args=args, alias=alias, resolve_mri=self.resolve_mri, fixed_percentile=1.0
- ),
- default_result_type="duration",
- ),
- fields.MetricsFunction(
- "http_error_count",
- snql_metric_layer=lambda args, alias: AliasedExpression(
- Column(
- SpanMRI.HTTP_ERROR_COUNT_LIGHT.value
- if not self.builder.has_transaction
- else SpanMRI.HTTP_ERROR_COUNT.value
- ),
- alias,
- ),
- default_result_type="integer",
- ),
- fields.MetricsFunction(
- "http_error_rate",
- snql_metric_layer=lambda args, alias: AliasedExpression(
- Column(
- SpanMRI.HTTP_ERROR_RATE_LIGHT.value
- if not self.builder.has_transaction
- else SpanMRI.HTTP_ERROR_RATE.value
- ),
- alias,
- ),
- default_result_type="percentage",
- ),
- ]
- }
-
- for alias, name in constants.SPAN_FUNCTION_ALIASES.items():
- if name in function_converter:
- function_converter[alias] = function_converter[name].alias_as(alias)
-
- return function_converter
-
- @property
- def orderby_converter(self) -> Mapping[str, OrderBy]:
- return {}
diff --git a/src/sentry/search/events/filter.py b/src/sentry/search/events/filter.py
index d0c8d4fc2bc73a..9f5b3b1289430d 100644
--- a/src/sentry/search/events/filter.py
+++ b/src/sentry/search/events/filter.py
@@ -73,7 +73,7 @@ def translate_transaction_status(val: str) -> str:
return SPAN_STATUS_NAME_TO_CODE[val]
-def to_list(value: list[str] | str) -> list[str]:
+def to_list[T](value: list[T] | T) -> list[T]:
if isinstance(value, list):
return value
return [value]
diff --git a/src/sentry/search/events/types.py b/src/sentry/search/events/types.py
index f4b8e4e4672311..abba86f4a621a5 100644
--- a/src/sentry/search/events/types.py
+++ b/src/sentry/search/events/types.py
@@ -65,9 +65,12 @@ class QueryFramework:
class EventsMeta(TypedDict):
+ datasetReason: NotRequired[str]
fields: dict[str, str]
tips: NotRequired[dict[str, str | None]]
isMetricsData: NotRequired[bool]
+ isMetricsExtractedData: NotRequired[bool]
+ discoverSplitDecision: NotRequired[str]
class EventsResponse(TypedDict):
@@ -100,6 +103,9 @@ def __post_init__(self) -> None:
# Only used in the trend query builder
self.aliases: dict[str, Alias] | None = {}
+ def __repr__(self) -> str:
+ return f""
+
def parse_stats_period(self) -> None:
if self.stats_period is not None:
self.end = django_timezone.now()
diff --git a/src/sentry/seer/similarity/utils.py b/src/sentry/seer/similarity/utils.py
index a7b8f088029581..49c5b0c417d610 100644
--- a/src/sentry/seer/similarity/utils.py
+++ b/src/sentry/seer/similarity/utils.py
@@ -5,6 +5,8 @@
from sentry import options
from sentry.eventstore.models import Event, GroupEvent
+from sentry.grouping.api import get_contributing_variant_and_component
+from sentry.grouping.variants import BaseVariant, ComponentVariant
from sentry.killswitches import killswitch_matches_context
from sentry.models.project import Project
from sentry.utils import metrics
@@ -15,11 +17,18 @@
MAX_FRAME_COUNT = 30
MAX_EXCEPTION_COUNT = 30
FULLY_MINIFIED_STACKTRACE_MAX_FRAME_COUNT = 20
-SEER_ELIGIBLE_PLATFORMS_EVENTS = frozenset(
+# Events' `platform` values are tested against this list before events are sent to Seer. Checking
+# this separately from backfill status allows us to backfill projects which have events from
+# multiple platforms, some supported and some not, and not worry about events from the unsupported
+# platforms getting sent to Seer during ingest.
+SEER_INELIGIBLE_EVENT_PLATFORMS = frozenset(["other"]) # We don't know what's in the event
+# Event platforms corresponding to project platforms which were backfilled before we started
+# blocking events with more than `MAX_FRAME_COUNT` frames from being sent to Seer (which we do to
+# prevent possible over-grouping). Ultimately we want a more unified solution, but for now, we're
+# just not going to apply the filter to events from these platforms.
+EVENT_PLATFORMS_BYPASSING_FRAME_COUNT_CHECK = frozenset(
[
- "csharp",
"go",
- "java",
"javascript",
"node",
"php",
@@ -27,136 +36,14 @@
"ruby",
]
)
-# An original set of platforms were backfilled allowing more than 30 system contributing frames
-# being set to seer. Unfortunately, this can cause over grouping. We will need to reduce
-# these set of platforms but for now we will blacklist them.
-SYSTEM_FRAME_CHECK_BLACKLIST_PLATFORMS = frozenset(
+# Existing projects with these platforms shouldn't be backfilled and new projects with these
+# platforms shouldn't have Seer enabled.
+SEER_INELIGIBLE_PROJECT_PLATFORMS = frozenset(
[
- "bun",
- "cordova",
- "deno",
- "django",
- "go",
- "go-echo",
- "go-fasthttp",
- "go-fiber",
- "go-gin",
- "go-http",
- "go-iris",
- "go-martini",
- "go-negroni",
- "ionic",
- "javascript",
- "javascript-angular",
- "javascript-angularjs",
- "javascript-astro",
- "javascript-backbone",
- "javascript-browser",
- "javascript-electron",
- "javascript-ember",
- "javascript-gatsby",
- "javascript-nextjs",
- "javascript-performance-onboarding-1-install",
- "javascript-performance-onboarding-2-configure",
- "javascript-performance-onboarding-3-verify",
- "javascript-react",
- "javascript-react-performance-onboarding-1-install",
- "javascript-react-performance-onboarding-2-configure",
- "javascript-react-performance-onboarding-3-verify",
- "javascript-react-with-error-monitoring",
- "javascript-react-with-error-monitoring-performance-and-replay",
- "javascript-remix",
- "javascript-replay-onboarding-1-install",
- "javascript-replay-onboarding-2-configure",
- "javascript-solid",
- "javascript-svelte",
- "javascript-sveltekit",
- "javascript-vue",
- "javascript-vue-with-error-monitoring",
- "node",
- "node-awslambda",
- "node-azurefunctions",
- "node-connect",
- "node-express",
- "node-fastify",
- "node-gcpfunctions",
- "node-hapi",
- "node-koa",
- "node-nestjs",
- "node-nodeawslambda",
- "node-nodegcpfunctions",
- "node-profiling-onboarding-0-alert",
- "node-profiling-onboarding-1-install",
- "node-profiling-onboarding-2-configure-performance",
- "node-profiling-onboarding-3-configure-profiling",
- "node-serverlesscloud",
- "PHP",
- "php",
- "php-laravel",
- "php-monolog",
- "php-symfony",
- "php-symfony2",
- "python",
- "python-aiohttp",
- "python-asgi",
- "python-awslambda",
- "python-azurefunctions",
- "python-bottle",
- "python-celery",
- "python-chalice",
- "python-django",
- "python-falcon",
- "python-fastapi",
- "python-flask",
- "python-gcpfunctions",
- "python-profiling-onboarding-0-alert",
- "python-profiling-onboarding-1-install",
- "python-profiling-onboarding-3-configure-profiling",
- "python-pylons",
- "python-pymongo",
- "python-pyramid",
- "python-pythonawslambda",
- "python-pythonazurefunctions",
- "python-pythongcpfunctions",
- "python-pythonserverless",
- "python-quart",
- "python-rq",
- "python-sanic",
- "python-serverless",
- "python-starlette",
- "python-tornado",
- "python-tryton",
- "python-wsgi",
- "react",
- "react-native",
- "react-native-tracing",
- "ruby",
- "ruby-rack",
- "ruby-rails",
- ]
-)
-SEER_ELIGIBLE_PLATFORMS = SYSTEM_FRAME_CHECK_BLACKLIST_PLATFORMS | frozenset(
- [
- "android",
- "android-profiling-onboarding-1-install",
- "android-profiling-onboarding-3-configure-profiling",
- "android-profiling-onboarding-4-upload",
- "csharp",
- "csharp-aspnetcore",
- "dart",
- "dotnet",
- "flutter",
- "groovy",
- "java",
- "java-android",
- "java-appengine",
- "java-log4j",
- "java-log4j2",
- "java-logging",
- "java-logback",
- "java-spring",
- "java-spring-boot",
- "perl",
+ # We have no clue what's in these projects
+ "other",
+ "",
+ None,
]
)
BASE64_ENCODED_PREFIXES = [
@@ -170,6 +57,8 @@
class ReferrerOptions(StrEnum):
INGEST = "ingest"
BACKFILL = "backfill"
+ DELETION = "deletion"
+ SIMILAR_ISSUES_TAB = "similar_issues_tab"
class TooManyOnlySystemFramesException(Exception):
@@ -236,9 +125,8 @@ def get_stacktrace_string(data: dict[str, Any], platform: str | None = None) ->
exception, frame_metrics
)
if (
- platform not in SYSTEM_FRAME_CHECK_BLACKLIST_PLATFORMS
+ platform not in EVENT_PLATFORMS_BYPASSING_FRAME_COUNT_CHECK
and frame_metrics["is_frames_truncated"]
- and not app_hash
):
raise TooManyOnlySystemFramesException
@@ -282,15 +170,12 @@ def generate_stacktrace_string(
},
)
- # Metric for errors with no header, only one frame and no filename
- # TODO: Determine how often this occurs and if we should send to seer, then remove metric
+ # Return empty stacktrace for events with no header, only one frame and no filename
+ # since this is too little info to group on
if frame_metrics["has_no_filename"] and len(result_parts) == 1:
header, frames = result_parts[0][0], result_parts[0][1]
if header == "" and len(frames) == 1:
- metrics.incr(
- "seer.grouping.no_header_one_frame_no_filename",
- sample_rate=options.get("seer.similarity.metrics_sample_rate"),
- )
+ stacktrace_str = ""
return stacktrace_str.strip()
@@ -397,7 +282,6 @@ def get_stacktrace_string_with_metrics(
data: dict[str, Any], platform: str | None, referrer: ReferrerOptions
) -> str | None:
stacktrace_string = None
- key = "grouping.similarity.did_call_seer"
sample_rate = options.get("seer.similarity.metrics_sample_rate")
try:
stacktrace_string = get_stacktrace_string(data, platform)
@@ -409,11 +293,7 @@ def get_stacktrace_string_with_metrics(
tags={"platform": platform, "referrer": referrer},
)
if referrer == ReferrerOptions.INGEST:
- metrics.incr(
- key,
- sample_rate=sample_rate,
- tags={"call_made": False, "blocker": "over-threshold-only-system-frames"},
- )
+ record_did_call_seer_metric(call_made=False, blocker="over-threshold-frames")
except Exception:
logger.exception("Unexpected exception in stacktrace string formatting")
@@ -430,78 +310,114 @@ def event_content_has_stacktrace(event: GroupEvent | Event) -> bool:
return exception_stacktrace or threads_stacktrace or only_stacktrace
-def event_content_is_seer_eligible(event: GroupEvent | Event) -> bool:
- """
- Determine if an event's contents makes it fit for using with Seer's similar issues model.
- """
- # TODO: Determine if we want to filter out non-sourcemapped events
- if not event_content_has_stacktrace(event):
+def record_did_call_seer_metric(*, call_made: bool, blocker: str) -> None:
+ metrics.incr(
+ "grouping.similarity.did_call_seer",
+ sample_rate=options.get("seer.similarity.metrics_sample_rate"),
+ tags={"call_made": call_made, "blocker": blocker},
+ )
+
+
+def has_too_many_contributing_frames(
+ event: Event | GroupEvent,
+ variants: dict[str, BaseVariant],
+ referrer: ReferrerOptions,
+) -> bool:
+ platform = event.platform
+ shared_tags = {"referrer": referrer.value, "platform": platform}
+
+ contributing_variant, contributing_component = get_contributing_variant_and_component(variants)
+
+ # Ideally we're calling this function after we already know the event both has a stacktrace and
+ # is using it for grouping (in which case none of the below conditions should apply), but still
+ # worth checking that we have enough information to answer the question just in case
+ if (
+ # Fingerprint, checksum, fallback variants
+ not isinstance(contributing_variant, ComponentVariant)
+ # Security violations, log-message-based grouping
+ or contributing_variant.variant_name == "default"
+ # Any ComponentVariant will have this, but this reassures mypy
+ or not contributing_component
+ # Exception-message-based grouping
+ or not hasattr(contributing_component, "frame_counts")
+ ):
+ # We don't bother to collect a metric on this outcome, because we shouldn't have called the
+ # function in the first place
+ return False
+
+ # Certain platforms were backfilled before we added this filter, so to keep new events matching
+ # with the existing data, we turn off the filter for them (instead their stacktraces will be
+ # truncated)
+ if platform in EVENT_PLATFORMS_BYPASSING_FRAME_COUNT_CHECK:
metrics.incr(
- "grouping.similarity.event_content_seer_eligible",
+ "grouping.similarity.frame_count_filter",
sample_rate=options.get("seer.similarity.metrics_sample_rate"),
- tags={"eligible": False, "blocker": "no-stacktrace"},
+ tags={**shared_tags, "outcome": "bypass"},
)
return False
- if event.platform not in SEER_ELIGIBLE_PLATFORMS_EVENTS:
+ stacktrace_type = "in_app" if contributing_variant.variant_name == "app" else "system"
+ key = f"{stacktrace_type}_contributing_frames"
+ shared_tags["stacktrace_type"] = stacktrace_type
+
+ if contributing_component.frame_counts[key] > MAX_FRAME_COUNT:
metrics.incr(
- "grouping.similarity.event_content_seer_eligible",
+ "grouping.similarity.frame_count_filter",
sample_rate=options.get("seer.similarity.metrics_sample_rate"),
- tags={"eligible": False, "blocker": "unsupported-platform"},
+ tags={**shared_tags, "outcome": "block"},
)
- return False
+ return True
metrics.incr(
- "grouping.similarity.event_content_seer_eligible",
+ "grouping.similarity.frame_count_filter",
sample_rate=options.get("seer.similarity.metrics_sample_rate"),
- tags={"eligible": True, "blocker": "none"},
+ tags={**shared_tags, "outcome": "pass"},
)
- return True
+ return False
-def killswitch_enabled(project_id: int, event: GroupEvent | Event | None = None) -> bool:
+def killswitch_enabled(
+ project_id: int | None,
+ referrer: ReferrerOptions,
+ event: GroupEvent | Event | None = None,
+) -> bool:
"""
Check both the global and similarity-specific Seer killswitches.
"""
-
+ is_ingest = referrer == ReferrerOptions.INGEST
+ logger_prefix = f"grouping.similarity.{referrer.value}"
logger_extra = {"event_id": event.event_id if event else None, "project_id": project_id}
if options.get("seer.global-killswitch.enabled"):
logger.warning(
- "should_call_seer_for_grouping.seer_global_killswitch_enabled",
+ f"{logger_prefix}.seer_global_killswitch_enabled", # noqa
extra=logger_extra,
)
- metrics.incr(
- "grouping.similarity.did_call_seer",
- sample_rate=options.get("seer.similarity.metrics_sample_rate"),
- tags={"call_made": False, "blocker": "global-killswitch"},
- )
+ if is_ingest:
+ record_did_call_seer_metric(call_made=False, blocker="global-killswitch")
+
return True
if options.get("seer.similarity-killswitch.enabled"):
logger.warning(
- "should_call_seer_for_grouping.seer_similarity_killswitch_enabled",
+ f"{logger_prefix}.seer_similarity_killswitch_enabled", # noqa
extra=logger_extra,
)
- metrics.incr(
- "grouping.similarity.did_call_seer",
- sample_rate=options.get("seer.similarity.metrics_sample_rate"),
- tags={"call_made": False, "blocker": "similarity-killswitch"},
- )
+ if is_ingest:
+ record_did_call_seer_metric(call_made=False, blocker="similarity-killswitch")
+
return True
if killswitch_matches_context(
"seer.similarity.grouping_killswitch_projects", {"project_id": project_id}
):
logger.warning(
- "should_call_seer_for_grouping.seer_similarity_project_killswitch_enabled",
+ f"{logger_prefix}.seer_similarity_project_killswitch_enabled", # noqa
extra=logger_extra,
)
- metrics.incr(
- "grouping.similarity.did_call_seer",
- sample_rate=options.get("seer.similarity.metrics_sample_rate"),
- tags={"call_made": False, "blocker": "project-killswitch"},
- )
+ if is_ingest:
+ record_did_call_seer_metric(call_made=False, blocker="project-killswitch")
+
return True
return False
@@ -543,7 +459,7 @@ def project_is_seer_eligible(project: Project) -> bool:
the feature is enabled in the region.
"""
is_backfill_completed = project.get_option("sentry:similarity_backfill_completed")
- is_seer_eligible_platform = project.platform in SEER_ELIGIBLE_PLATFORMS
+ is_seer_eligible_platform = project.platform not in SEER_INELIGIBLE_PROJECT_PLATFORMS
is_region_enabled = options.get("similarity.new_project_seer_grouping.enabled")
return not is_backfill_completed and is_seer_eligible_platform and is_region_enabled
diff --git a/src/sentry/sentry_apps/api/bases/sentryapps.py b/src/sentry/sentry_apps/api/bases/sentryapps.py
index 1ce641f943707d..f2046c61081b9f 100644
--- a/src/sentry/sentry_apps/api/bases/sentryapps.py
+++ b/src/sentry/sentry_apps/api/bases/sentryapps.py
@@ -120,7 +120,7 @@ class SentryAppsBaseEndpoint(IntegrationPlatformEndpoint):
permission_classes: tuple[type[BasePermission], ...] = (SentryAppsAndStaffPermission,)
def _get_organization_slug(self, request: Request):
- organization_slug = request.json_body.get("organization")
+ organization_slug = request.data.get("organization")
if not organization_slug or not isinstance(organization_slug, str):
error_message = "Please provide a valid value for the 'organization' field."
raise ValidationError({"organization": to_single_line_str(error_message)})
@@ -179,7 +179,7 @@ def convert_args(self, request: Request, *args, **kwargs):
objects from URI params, we're applying the same logic for a param in
the request body.
"""
- if not request.json_body:
+ if not request.data:
return (args, kwargs)
context = self._get_org_context(request)
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py
index 8f999d7eb1fc95..05b1aad13758d1 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py
@@ -46,7 +46,7 @@ def post(self, request: Request, installation) -> Response:
scope.set_tag("sentry_app_slug", installation.sentry_app.slug)
try:
- if request.json_body.get("grant_type") == GrantTypes.AUTHORIZATION:
+ if request.data.get("grant_type") == GrantTypes.AUTHORIZATION:
auth_serializer: SentryAppAuthorizationSerializer = (
SentryAppAuthorizationSerializer(data=request.data)
)
@@ -60,7 +60,7 @@ def post(self, request: Request, installation) -> Response:
client_id=auth_serializer.validated_data.get("client_id"),
user=promote_request_api_user(request),
).run()
- elif request.json_body.get("grant_type") == GrantTypes.REFRESH:
+ elif request.data.get("grant_type") == GrantTypes.REFRESH:
refresh_serializer = SentryAppRefreshAuthorizationSerializer(data=request.data)
if not refresh_serializer.is_valid():
@@ -87,7 +87,7 @@ def post(self, request: Request, installation) -> Response:
)
return Response({"error": e.msg or "Unauthorized"}, status=403)
- attrs = {"state": request.json_body.get("state"), "application": None}
+ attrs = {"state": request.data.get("state"), "application": None}
body = ApiTokenSerializer().serialize(token, attrs, promote_request_api_user(request))
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_details.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_details.py
index 09aae78ab8fbb1..f972ee75d74283 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_details.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_details.py
@@ -192,7 +192,7 @@ def delete(self, request: Request, sentry_app) -> Response:
return Response({"detail": ["Published apps cannot be removed."]}, status=403)
def _has_hook_events(self, request: Request):
- if not request.json_body.get("events"):
+ if not request.data.get("events"):
return False
- return "error" in request.json_body["events"]
+ return "error" in request.data["events"]
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_apps.py b/src/sentry/sentry_apps/api/endpoints/sentry_apps.py
index 1ebe56f95d6374..eecbb92c7f8c80 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_apps.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_apps.py
@@ -71,22 +71,22 @@ def get(self, request: Request) -> Response:
def post(self, request: Request, organization) -> Response:
data = {
- "name": request.json_body.get("name"),
+ "name": request.data.get("name"),
"user": request.user,
- "author": request.json_body.get("author"),
+ "author": request.data.get("author"),
"organization": organization,
- "webhookUrl": request.json_body.get("webhookUrl"),
- "redirectUrl": request.json_body.get("redirectUrl"),
- "isAlertable": request.json_body.get("isAlertable"),
- "isInternal": request.json_body.get("isInternal"),
- "verifyInstall": request.json_body.get("verifyInstall"),
- "scopes": request.json_body.get("scopes", []),
- "events": request.json_body.get("events", []),
- "schema": request.json_body.get("schema", {}),
- "overview": request.json_body.get("overview"),
- "allowedOrigins": request.json_body.get("allowedOrigins", []),
+ "webhookUrl": request.data.get("webhookUrl"),
+ "redirectUrl": request.data.get("redirectUrl"),
+ "isAlertable": request.data.get("isAlertable"),
+ "isInternal": request.data.get("isInternal"),
+ "verifyInstall": request.data.get("verifyInstall"),
+ "scopes": request.data.get("scopes", []),
+ "events": request.data.get("events", []),
+ "schema": request.data.get("schema", {}),
+ "overview": request.data.get("overview"),
+ "allowedOrigins": request.data.get("allowedOrigins", []),
"popularity": (
- request.json_body.get("popularity") if is_active_superuser(request) else None
+ request.data.get("popularity") if is_active_superuser(request) else None
),
}
@@ -166,7 +166,7 @@ def _filter_queryset_for_user(self, queryset: BaseQuerySet[SentryApp, SentryApp]
return queryset.filter(owner_id__in=owner_ids)
def _has_hook_events(self, request: Request):
- if not request.json_body.get("events"):
+ if not request.data.get("events"):
return False
- return "error" in request.json_body["events"]
+ return "error" in request.data["events"]
diff --git a/src/sentry/sentry_apps/services/app/model.py b/src/sentry/sentry_apps/services/app/model.py
index 20609618b50841..b9ac92bccc638b 100644
--- a/src/sentry/sentry_apps/services/app/model.py
+++ b/src/sentry/sentry_apps/services/app/model.py
@@ -18,8 +18,8 @@
class RpcApiApplication(RpcModel):
id: int = -1
- client_id: str = ""
- client_secret: str = ""
+ client_id: str = Field(repr=False, default="")
+ client_secret: str = Field(repr=False, default="")
class RpcSentryAppService(RpcModel):
diff --git a/src/sentry/sentry_apps/utils/errors.py b/src/sentry/sentry_apps/utils/errors.py
new file mode 100644
index 00000000000000..af43d04c00b01d
--- /dev/null
+++ b/src/sentry/sentry_apps/utils/errors.py
@@ -0,0 +1,35 @@
+from enum import Enum
+
+
+class SentryAppErrorType(Enum):
+ CLIENT = "client"
+ INTEGRATOR = "integrator"
+ SENTRY = "sentry"
+
+
+# Represents a user/client error that occurred during a Sentry App process
+class SentryAppError(Exception):
+ error_type = SentryAppErrorType.CLIENT
+ status_code = 400
+
+ def __init__(
+ self,
+ error: Exception | None = None,
+ status_code: int | None = None,
+ ) -> None:
+ if status_code:
+ self.status_code = status_code
+
+
+# Represents an error caused by a third-party integrator during a Sentry App process
+class SentryAppIntegratorError(Exception):
+ error_type = SentryAppErrorType.INTEGRATOR
+ status_code = 400
+
+ def __init__(
+ self,
+ error: Exception | None = None,
+ status_code: int | None = None,
+ ) -> None:
+ if status_code:
+ self.status_code = status_code
diff --git a/src/sentry/sentry_metrics/configuration.py b/src/sentry/sentry_metrics/configuration.py
index 0f812f29362341..8c8b63752f26f5 100644
--- a/src/sentry/sentry_metrics/configuration.py
+++ b/src/sentry/sentry_metrics/configuration.py
@@ -27,8 +27,6 @@ class UseCaseKey(Enum):
# backwards compatibility
RELEASE_HEALTH_PG_NAMESPACE = "releasehealth"
PERFORMANCE_PG_NAMESPACE = "performance"
-RELEASE_HEALTH_CS_NAMESPACE = "releasehealth.cs"
-PERFORMANCE_CS_NAMESPACE = "performance.cs"
RELEASE_HEALTH_SCHEMA_VALIDATION_RULES_OPTION_NAME = (
"sentry-metrics.indexer.release-health.schema-validation-rules"
@@ -172,23 +170,3 @@ def initialize_main_process_state(config: MetricsIngestConfiguration) -> None:
global_tag_map = {"pipeline": config.internal_metrics_tag or ""}
add_global_tags(_all_threads=True, **global_tag_map)
-
-
-HARD_CODED_UNITS = {"span.duration": "millisecond"}
-ALLOWED_TYPES = {"c", "d", "s", "g"}
-
-# METRICS_AGGREGATES specifies the aggregates that are available for a metric type - AGGREGATES_TO_METRICS reverses this,
-# and provides a map from the aggregate to the metric type in the form {'count': 'c', 'avg':'g', ...}. This is needed
-# when the UI lets the user select the aggregate, and the backend infers the metric_type from it. It is programmatic
-# and not hard-coded, so that in case of a change, the two mappings are aligned.
-METRIC_TYPE_TO_AGGREGATE = {
- "c": ["count"],
- "g": ["avg", "min", "max", "sum"],
- "d": ["p50", "p75", "p90", "p95", "p99"],
- "s": ["count_unique"],
-}
-AGGREGATE_TO_METRIC_TYPE = {
- aggregate: metric_type
- for metric_type, aggregate_list in METRIC_TYPE_TO_AGGREGATE.items()
- for aggregate in aggregate_list
-}
diff --git a/src/sentry/sentry_metrics/consumers/indexer/batch.py b/src/sentry/sentry_metrics/consumers/indexer/batch.py
index ef52a8ffe1b44b..707a0a643ab291 100644
--- a/src/sentry/sentry_metrics/consumers/indexer/batch.py
+++ b/src/sentry/sentry_metrics/consumers/indexer/batch.py
@@ -1,7 +1,7 @@
import logging
import random
from collections import defaultdict
-from collections.abc import Callable, Iterable, Mapping, MutableMapping, MutableSequence, Sequence
+from collections.abc import Callable, Iterable, Mapping, MutableMapping, MutableSequence
from dataclasses import dataclass
from typing import Any, cast
@@ -248,27 +248,6 @@ def _validate_message(self, parsed_payload: ParsedMessage) -> None:
)
raise ValueError(f"Invalid metric tags: {tags}")
- @metrics.wraps("process_messages.filter_messages")
- def filter_messages(self, keys_to_remove: Sequence[BrokerMeta]) -> None:
- # XXX: it is useful to be able to get a sample of organization ids that are affected by rate limits, but this is really slow.
- for broker_meta in keys_to_remove:
- if _should_sample_debug_log():
- sentry_sdk.set_tag(
- "sentry_metrics.organization_id",
- self.parsed_payloads_by_meta[broker_meta]["org_id"],
- )
- sentry_sdk.set_tag(
- "sentry_metrics.metric_name", self.parsed_payloads_by_meta[broker_meta]["name"]
- )
- logger.error(
- "process_messages.dropped_message",
- extra={
- "reason": "cardinality_limit",
- },
- )
-
- self.filtered_msg_meta.update(keys_to_remove)
-
@metrics.wraps("process_messages.extract_strings")
def extract_strings(self) -> Mapping[UseCaseID, Mapping[OrgId, set[str]]]:
strings: Mapping[UseCaseID, Mapping[OrgId, set[str]]] = defaultdict(
diff --git a/src/sentry/sentry_metrics/consumers/indexer/common.py b/src/sentry/sentry_metrics/consumers/indexer/common.py
index 9b0a18c8281fb9..54fbfdf066520d 100644
--- a/src/sentry/sentry_metrics/consumers/indexer/common.py
+++ b/src/sentry/sentry_metrics/consumers/indexer/common.py
@@ -26,9 +26,6 @@ class BrokerMeta(NamedTuple):
logger = logging.getLogger(__name__)
-DEFAULT_QUEUED_MAX_MESSAGE_KBYTES = 50000
-DEFAULT_QUEUED_MIN_MESSAGES = 100000
-
@dataclass(frozen=True)
class IndexerOutputMessageBatch:
diff --git a/src/sentry/sentry_metrics/extraction_rules.py b/src/sentry/sentry_metrics/extraction_rules.py
deleted file mode 100644
index 38246de5e50dc4..00000000000000
--- a/src/sentry/sentry_metrics/extraction_rules.py
+++ /dev/null
@@ -1,80 +0,0 @@
-from __future__ import annotations
-
-from collections.abc import Mapping
-from dataclasses import dataclass
-from typing import Any
-
-from sentry.sentry_metrics.configuration import (
- AGGREGATE_TO_METRIC_TYPE,
- ALLOWED_TYPES,
- HARD_CODED_UNITS,
-)
-from sentry.sentry_metrics.use_case_utils import string_to_use_case_id
-
-METRICS_EXTRACTION_RULES_OPTION_KEY = "sentry:metrics_extraction_rules"
-SPAN_ATTRIBUTE_PREFIX = "span_attribute_"
-
-
-class MetricsExtractionRuleValidationError(ValueError):
- pass
-
-
-@dataclass
-class MetricsExtractionRule:
- def __init__(
- self,
- span_attribute: str,
- type: str,
- unit: str,
- tags: set[str],
- condition: str,
- id: int,
- ):
- self.span_attribute = self.validate_span_attribute(span_attribute)
- self.type = self.validate_type(type)
- self.unit = HARD_CODED_UNITS.get(span_attribute, unit)
- self.tags = set(tags)
- self.condition = condition
- self.id = id
-
- def validate_span_attribute(self, span_attribute: str) -> str:
- if not isinstance(span_attribute, str):
- raise ValueError("The span attribute must be of type string.")
- return span_attribute
-
- def validate_type(self, type_value: str) -> str:
- if not isinstance(type_value, str):
- raise ValueError("The type must be of type string.")
-
- if type_value not in ALLOWED_TYPES:
- raise ValueError(
- "Type can only have the following values: 'c' for counter, 'd' for distribution, 'g' for gauge, or 's' for set."
- )
- return type_value
-
- @classmethod
- def infer_types(self, aggregates: set[str]) -> set[str]:
- types: set[str] = set()
- for aggregate in aggregates:
- if new_type := AGGREGATE_TO_METRIC_TYPE.get(aggregate):
- types.add(new_type)
-
- return types
-
- def to_dict(self) -> Mapping[str, Any]:
- return {
- "spanAttribute": self.span_attribute,
- "type": self.type,
- "unit": self.unit,
- "tags": self.tags,
- "condition": self.condition,
- "id": self.id,
- }
-
- def generate_mri(self, use_case: str = "custom"):
- """Generate the Metric Resource Identifier (MRI) associated with the extraction rule."""
- use_case_id = string_to_use_case_id(use_case)
- return f"{self.type}:{use_case_id.value}/{SPAN_ATTRIBUTE_PREFIX}{self.id}@none"
-
- def __hash__(self):
- return hash(self.generate_mri())
diff --git a/src/sentry/sentry_metrics/indexer/id_generator.py b/src/sentry/sentry_metrics/indexer/id_generator.py
deleted file mode 100644
index db0514468d14d9..00000000000000
--- a/src/sentry/sentry_metrics/indexer/id_generator.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import random
-import time
-
-_VERSION_BITS = 4
-_TS_BITS = 32
-_RANDOM_BITS = 28
-_TOTAL_BITS = _VERSION_BITS + _TS_BITS + _RANDOM_BITS
-assert _TOTAL_BITS == 64
-
-_VERSION = 2
-
-# Warning! The version must be an even number as this is already
-# written to a BigInt field in Postgres
-assert _VERSION % 2 == 0
-
-# 1st January 2022
-_INDEXER_EPOCH_START = 1641024000
-
-
-def reverse_bits(number: int, bit_size: int) -> int:
- return int(bin(number)[2:].zfill(bit_size)[::-1], 2)
-
-
-# we will have room b/n version and time since for a while
-# so let's reverse the version bits to grow to the right
-# instead of left should we need more than 3 bits for version
-
-_VERSION_PREFIX = reverse_bits(_VERSION, _VERSION_BITS)
-
-
-def get_id() -> int:
- """
- Generates IDs for use by indexer storages that do not have autoincrement sequences.
-
- This function does not provide any guarantee of uniqueness, just a low probability of collisions.
- It relies on the database to be strongly consistent and reject writes with duplicate IDs. These should
- be retried with a newly generated ID.
-
- The ID generated is in roughly incrementing order.
-
- Metric IDs are 64 bit but this function only generates IDs that fit in 63 bits. The leading bit is always zero.
- This is because they were stored in Postgres as BigInt (signed 64 bit) and we do not want to change that now.
- In ClickHouse it is an unsigned 64 bit integer.
- """
-
- now = int(time.time())
- time_since_epoch = now - _INDEXER_EPOCH_START
- rand = random.getrandbits(_RANDOM_BITS)
-
- id = _VERSION_PREFIX << (_TOTAL_BITS - _VERSION_BITS)
- id |= time_since_epoch << (_TOTAL_BITS - _VERSION_BITS - _TS_BITS)
- id |= rand
-
- return id
diff --git a/src/sentry/sentry_metrics/indexer/postgres/models.py b/src/sentry/sentry_metrics/indexer/postgres/models.py
index 7575951f74c3a5..142af00288e919 100644
--- a/src/sentry/sentry_metrics/indexer/postgres/models.py
+++ b/src/sentry/sentry_metrics/indexer/postgres/models.py
@@ -1,8 +1,8 @@
import logging
-from typing import Any, ClassVar, Self
+from typing import ClassVar, Self
from django.conf import settings
-from django.db import connections, models, router
+from django.db import models
from django.utils import timezone
from sentry.backup.scopes import RelocationScope
@@ -16,35 +16,6 @@
from collections.abc import Mapping
-@region_silo_model
-class MetricsKeyIndexer(Model):
- __relocation_scope__ = RelocationScope.Excluded
-
- string = models.CharField(max_length=200)
- date_added = models.DateTimeField(default=timezone.now)
-
- objects: ClassVar[BaseManager[Self]] = BaseManager(
- cache_fields=("pk", "string"), cache_ttl=settings.SENTRY_METRICS_INDEXER_CACHE_TTL
- )
-
- class Meta:
- db_table = "sentry_metricskeyindexer"
- app_label = "sentry"
- constraints = [
- models.UniqueConstraint(fields=["string"], name="unique_string"),
- ]
-
- @classmethod
- def get_next_values(cls, num: int) -> Any:
- using = router.db_for_write(cls)
- connection = connections[using].cursor()
-
- connection.execute(
- "SELECT nextval('sentry_metricskeyindexer_id_seq') from generate_series(1,%s)", [num]
- )
- return connection.fetchall()
-
-
class BaseIndexer(Model):
string = models.CharField(max_length=MAX_INDEXED_COLUMN_LENGTH)
organization_id = BoundedBigIntegerField()
diff --git a/src/sentry/sentry_metrics/indexer/postgres/postgres_v2.py b/src/sentry/sentry_metrics/indexer/postgres/postgres_v2.py
index 2eaac04915ff6c..8155a49ef506b0 100644
--- a/src/sentry/sentry_metrics/indexer/postgres/postgres_v2.py
+++ b/src/sentry/sentry_metrics/indexer/postgres/postgres_v2.py
@@ -31,7 +31,6 @@
__all__ = ["PostgresIndexer"]
-_INDEXER_CACHE_METRIC = "sentry_metrics.indexer.memcache"
_INDEXER_DB_METRIC = "sentry_metrics.indexer.postgres"
_PARTITION_KEY = "pg"
diff --git a/src/sentry/sentry_metrics/querying/constants.py b/src/sentry/sentry_metrics/querying/constants.py
index 64ed5b42be3f9a..508202c3262d84 100644
--- a/src/sentry/sentry_metrics/querying/constants.py
+++ b/src/sentry/sentry_metrics/querying/constants.py
@@ -2,17 +2,6 @@
# Snuba can return at most 10.000 rows.
SNUBA_QUERY_LIMIT = 10000
-# Intervals in seconds which are used by the product to query data.
-DEFAULT_QUERY_INTERVALS = [
- 60 * 60 * 24, # 1 day
- 60 * 60 * 12, # 12 hours
- 60 * 60 * 4, # 4 hours
- 60 * 60 * 2, # 2 hours
- 60 * 60, # 1 hour
- 60 * 30, # 30 min
- 60 * 5, # 5 min
- 60, # 1 min
-]
# Operators in formulas that use coefficients.
COEFFICIENT_OPERATORS = {
ArithmeticOperator.DIVIDE.value,
diff --git a/src/sentry/sentry_metrics/querying/data/mapping/base.py b/src/sentry/sentry_metrics/querying/data/mapping/base.py
index e241dd0229a571..b67384654bf093 100644
--- a/src/sentry/sentry_metrics/querying/data/mapping/base.py
+++ b/src/sentry/sentry_metrics/querying/data/mapping/base.py
@@ -1,6 +1,6 @@
import abc
from collections.abc import Sequence
-from typing import Any, TypeVar
+from typing import Any
from sentry.models.project import Project
@@ -26,9 +26,6 @@ def backward(self, projects: Sequence[Project], value: Any) -> Any:
return value
-TMapper = TypeVar("TMapper", bound=Mapper)
-
-
class MapperConfig:
def __init__(self):
self.mappers: set[type[Mapper]] = set()
diff --git a/src/sentry/sentry_metrics/querying/data/transformation/stats.py b/src/sentry/sentry_metrics/querying/data/transformation/stats.py
deleted file mode 100644
index 45cc44c37f4147..00000000000000
--- a/src/sentry/sentry_metrics/querying/data/transformation/stats.py
+++ /dev/null
@@ -1,48 +0,0 @@
-from collections.abc import Mapping, Sequence
-from dataclasses import dataclass
-from typing import Any
-
-from sentry.sentry_metrics.querying.data.execution import QueryResult
-from sentry.sentry_metrics.querying.data.transformation.base import QueryResultsTransformer
-from sentry.utils.outcomes import Outcome
-
-
-@dataclass(frozen=True)
-class MetricsOutcomesResult:
- series: Sequence[Mapping[str, Any]]
- totals: Sequence[Mapping[str, Any]]
-
-
-class MetricsOutcomesTransformer(QueryResultsTransformer[Mapping[str, Any]]):
- def transform_result(self, result: Sequence[Mapping[str, Any]]) -> Sequence[Mapping[str, Any]]:
- ret_val = []
-
- for item in result:
- ret_val_item = {}
- for key in item:
- if key == "outcome.id":
- outcome = int(item[key])
- ret_val_item["outcome"] = Outcome(outcome).api_name()
- elif key in "aggregate_value":
- ret_val_item["quantity"] = item[key]
- else:
- ret_val_item[key] = item[key]
-
- ret_val.append(ret_val_item)
-
- return ret_val
-
- def transform(self, query_results: Sequence[QueryResult]) -> Mapping[str, Any]:
- """
- Transforms the query results into the format returned by outcomes queries.
- Performs necessary mappings to match that format such as outcome.id -> outcome
-
- """
-
- if not query_results or len(query_results) == 0:
- return {"series": [], "totals": []}
-
- series = self.transform_result(query_results[0].series)
- totals = self.transform_result(query_results[0].totals)
-
- return {"series": series, "totals": totals}
diff --git a/src/sentry/sentry_metrics/querying/errors.py b/src/sentry/sentry_metrics/querying/errors.py
index 56ef0a2d916afd..365e2f628e7e84 100644
--- a/src/sentry/sentry_metrics/querying/errors.py
+++ b/src/sentry/sentry_metrics/querying/errors.py
@@ -8,7 +8,3 @@ class MetricsQueryExecutionError(Exception):
class LatestReleaseNotFoundError(Exception):
pass
-
-
-class CorrelationsQueryExecutionError(Exception):
- pass
diff --git a/src/sentry/sentry_metrics/querying/metadata/metrics.py b/src/sentry/sentry_metrics/querying/metadata/metrics.py
index e09e828ca2572a..09ba8d840968ca 100644
--- a/src/sentry/sentry_metrics/querying/metadata/metrics.py
+++ b/src/sentry/sentry_metrics/querying/metadata/metrics.py
@@ -18,13 +18,7 @@
from sentry.snuba.metrics import parse_mri
from sentry.snuba.metrics.datasource import get_metrics_blocking_state_of_projects
from sentry.snuba.metrics.naming_layer.mri import ParsedMRI, get_available_operations
-from sentry.snuba.metrics.utils import (
- BlockedMetric,
- MetricMeta,
- MetricOperationType,
- MetricType,
- MetricUnit,
-)
+from sentry.snuba.metrics.utils import BlockedMetric, MetricMeta, MetricType, MetricUnit
from sentry.snuba.metrics_layer.query import fetch_metric_mris
@@ -152,7 +146,7 @@ def _build_metric_meta(
name=parsed_mri.name,
unit=cast(MetricUnit, parsed_mri.unit),
mri=parsed_mri.mri_string,
- operations=cast(Sequence[MetricOperationType], available_operations),
+ operations=available_operations,
projectIds=project_ids,
blockingStatus=blocking_status,
)
diff --git a/src/sentry/sentry_metrics/querying/metadata/utils.py b/src/sentry/sentry_metrics/querying/metadata/utils.py
index 88d00b58c5623e..2a3e5520a374e4 100644
--- a/src/sentry/sentry_metrics/querying/metadata/utils.py
+++ b/src/sentry/sentry_metrics/querying/metadata/utils.py
@@ -1,7 +1,10 @@
+from __future__ import annotations
+
from sentry.snuba.metrics import get_mri
from sentry.snuba.metrics.naming_layer.mri import is_mri
+from sentry.snuba.metrics.utils import MetricOperationType
-METRICS_API_HIDDEN_OPERATIONS = {
+METRICS_API_HIDDEN_OPERATIONS: dict[str, list[MetricOperationType]] = {
"sentry:metrics_activate_percentiles": [
"p50",
"p75",
@@ -12,20 +15,21 @@
"sentry:metrics_activate_last_for_gauges": ["last"],
}
-NON_QUERYABLE_METRIC_OPERATIONS = ["histogram", "min_timestamp", "max_timestamp"]
+NON_QUERYABLE_METRIC_OPERATIONS: list[MetricOperationType] = [
+ "histogram",
+ "min_timestamp",
+ "max_timestamp",
+]
class OperationsConfiguration:
def __init__(self):
self.hidden_operations = set()
- def hide_operation(self, operation: str) -> None:
- self.hidden_operations.add(operation)
-
- def hide_operations(self, operations: list[str]) -> None:
+ def hide_operations(self, operations: list[MetricOperationType]) -> None:
self.hidden_operations.update(operations)
- def get_hidden_operations(self):
+ def get_hidden_operations(self) -> list[MetricOperationType]:
return list(self.hidden_operations)
diff --git a/src/sentry/sentry_metrics/querying/utils.py b/src/sentry/sentry_metrics/querying/utils.py
index 527cf26721bef8..e5304d44513634 100644
--- a/src/sentry/sentry_metrics/querying/utils.py
+++ b/src/sentry/sentry_metrics/querying/utils.py
@@ -1,5 +1,3 @@
-import re
-
from django.conf import settings
from rediscluster import RedisCluster
@@ -12,11 +10,3 @@ def get_redis_client_for_metrics_meta() -> RedisCluster:
"""
cluster_key = settings.SENTRY_METRIC_META_REDIS_CLUSTER
return redis.redis_clusters.get(cluster_key) # type: ignore[return-value]
-
-
-def remove_if_match(pattern, string: str) -> str:
- """
- Removes a pattern from a string.
- """
- # Use the re.sub function to replace the matched characters with an empty string
- return re.sub(pattern, "", string)
diff --git a/src/sentry/snuba/entity_subscription.py b/src/sentry/snuba/entity_subscription.py
index 08c7319179d86f..96bb7b5fcb1b46 100644
--- a/src/sentry/snuba/entity_subscription.py
+++ b/src/sentry/snuba/entity_subscription.py
@@ -161,6 +161,7 @@ def build_rpc_request(
environment: Environment | None,
params: ParamsType | None = None,
skip_field_validation_for_entity_subscription_deletion: bool = False,
+ referrer: str = Referrer.API_ALERTS_ALERT_RULE_CHART.value,
) -> TimeSeriesRequest:
raise NotImplementedError
@@ -293,6 +294,7 @@ def build_rpc_request(
environment: Environment | None,
params: ParamsType | None = None,
skip_field_validation_for_entity_subscription_deletion: bool = False,
+ referrer: str = Referrer.API_ALERTS_ALERT_RULE_CHART.value,
) -> TimeSeriesRequest:
if params is None:
params = {}
@@ -317,7 +319,7 @@ def build_rpc_request(
query_string=query,
y_axes=[self.aggregate],
groupby=[],
- referrer=Referrer.API_ALERTS_ALERT_RULE_CHART.value,
+ referrer=referrer,
config=SearchResolverConfig(),
granularity_secs=self.time_window,
)
diff --git a/src/sentry/snuba/errors.py b/src/sentry/snuba/errors.py
index 8466a1f40c2c78..96563bd52e6883 100644
--- a/src/sentry/snuba/errors.py
+++ b/src/sentry/snuba/errors.py
@@ -4,6 +4,7 @@
from typing import cast
import sentry_sdk
+from snuba_sdk import Column, Condition
from sentry.discover.arithmetic import categorize_columns
from sentry.exceptions import InvalidSearchQuery
@@ -30,29 +31,31 @@
def query(
- selected_columns,
- query,
- snuba_params,
- equations=None,
- orderby=None,
- offset=None,
- limit=50,
- referrer=None,
- auto_fields=False,
- auto_aggregations=False,
- include_equation_fields=False,
- allow_metric_aggregates=False,
- use_aggregate_conditions=False,
- conditions=None,
- functions_acl=None,
- transform_alias_to_input_format=False,
- sample=None,
- has_metrics=False,
- use_metrics_layer=False,
- skip_tag_resolution=False,
- on_demand_metrics_enabled=False,
+ selected_columns: list[str],
+ query: str,
+ snuba_params: SnubaParams,
+ equations: list[str] | None = None,
+ orderby: list[str] | None = None,
+ offset: int | None = None,
+ limit: int = 50,
+ referrer: str | None = None,
+ auto_fields: bool = False,
+ auto_aggregations: bool = False,
+ include_equation_fields: bool = False,
+ allow_metric_aggregates: bool = False,
+ use_aggregate_conditions: bool = False,
+ conditions: list[Condition] | None = None,
+ functions_acl: list[str] | None = None,
+ transform_alias_to_input_format: bool = False,
+ sample: float | None = None,
+ has_metrics: bool = False,
+ use_metrics_layer: bool = False,
+ skip_tag_resolution: bool = False,
+ extra_columns: list[Column] | None = None,
+ on_demand_metrics_enabled: bool = False,
on_demand_metrics_type: MetricSpecType | None = None,
- fallback_to_transactions=False,
+ dataset: Dataset = Dataset.Events,
+ fallback_to_transactions: bool = False,
query_source: QuerySource | None = None,
) -> EventsResponse:
if not selected_columns:
diff --git a/src/sentry/snuba/metrics/datasource.py b/src/sentry/snuba/metrics/datasource.py
index ea290073a10fb2..78729018fe3b8b 100644
--- a/src/sentry/snuba/metrics/datasource.py
+++ b/src/sentry/snuba/metrics/datasource.py
@@ -1,10 +1,3 @@
-from __future__ import annotations
-
-from functools import lru_cache
-
-import sentry_sdk
-from rest_framework.exceptions import NotFound
-
"""
Module that gets both metadata and time series from Snuba.
For metadata, it fetch metrics metadata (metric names, tag names, tag values, ...) from snuba.
@@ -13,12 +6,7 @@
efficient, we only look at the past 24 hours.
"""
-__all__ = (
- "get_all_tags",
- "get_tag_values",
- "get_series",
- "get_single_metric_info",
-)
+from __future__ import annotations
import logging
from collections import defaultdict, deque
@@ -29,14 +17,14 @@
from operator import itemgetter
from typing import Any
-from snuba_sdk import And, Column, Condition, Function, Op, Or, Query, Request
+import sentry_sdk
+from rest_framework.exceptions import NotFound
+from snuba_sdk import Column, Condition, Function, Op, Query, Request
from snuba_sdk.conditions import ConditionGroup
from sentry.exceptions import InvalidParams
from sentry.models.project import Project
from sentry.sentry_metrics import indexer
-from sentry.sentry_metrics.indexer.strings import PREFIX as SHARED_STRINGS_PREFIX
-from sentry.sentry_metrics.indexer.strings import SHARED_STRINGS
from sentry.sentry_metrics.use_case_id_registry import UseCaseID
from sentry.sentry_metrics.utils import (
MetricIndexNotFound,
@@ -49,7 +37,6 @@
from sentry.snuba.metrics.fields import run_metrics_query
from sentry.snuba.metrics.fields.base import (
SnubaDataType,
- build_metrics_query,
get_derived_metrics,
org_id_from_projects,
)
@@ -80,7 +67,15 @@
get_intervals,
to_intervals,
)
-from sentry.utils.snuba import bulk_snuba_queries, raw_snql_query
+from sentry.utils.snuba import raw_snql_query
+
+__all__ = (
+ "get_all_tags",
+ "get_tag_values",
+ "get_series",
+ "get_single_metric_info",
+)
+
logger = logging.getLogger(__name__)
@@ -107,133 +102,6 @@ def _get_metrics_for_entity(
)
-def _get_metrics_by_project_for_entity_query(
- entity_key: EntityKey,
- project_ids: Sequence[int],
- org_id: int,
- use_case_id: UseCaseID,
- start: datetime | None = None,
- end: datetime | None = None,
-) -> Request:
- where = [Condition(Column("use_case_id"), Op.EQ, use_case_id.value)]
- where.extend(_get_mri_constraints_for_use_case(entity_key, use_case_id))
-
- return build_metrics_query(
- entity_key=entity_key,
- select=[Column("project_id"), Column("metric_id")],
- groupby=[Column("project_id"), Column("metric_id")],
- where=where,
- project_ids=project_ids,
- org_id=org_id,
- use_case_id=use_case_id,
- start=start,
- end=end,
- )
-
-
-@lru_cache(maxsize=len(EntityKey) * len(UseCaseID))
-def _get_mri_constraints_for_use_case(entity_key: EntityKey, use_case_id: UseCaseID):
- # Sessions exist on a different infrastructure that works differently,
- # thus this optimization does not apply.
- if use_case_id == UseCaseID.SESSIONS:
- return []
-
- conditions = []
-
- # Look for the min/max of the metric id range for the given use case id to
- # constrain the search ClickHouse must do otherwise, it'll attempt a full scan.
- #
- # This assumes that metric ids are divided into non-overlapping ranges by the
- # use case id, so we can focus on a particular range for better performance.
- min_metric_id = SHARED_STRINGS_PREFIX << 1 # larger than possible metric ids
- max_metric_id = 0
-
- for mri, id in SHARED_STRINGS.items():
- parsed_mri = parse_mri(mri)
- if parsed_mri is not None and parsed_mri.namespace == use_case_id.value:
- min_metric_id = min(id, min_metric_id)
- max_metric_id = max(id, max_metric_id)
-
- # It's possible that there's a metric id within the use case that is not
- # hard coded so we should always check the range of custom metric ids.
- condition = Condition(Column("metric_id"), Op.LT, SHARED_STRINGS_PREFIX)
-
- # If we find a valid range, we extend the condition to check it as well.
- if min_metric_id <= max_metric_id:
- condition = Or(
- [
- condition,
- # Expand the search to include the range of the hard coded
- # metric ids if a valid range was found.
- And(
- [
- Condition(Column("metric_id"), Op.GTE, min_metric_id),
- Condition(Column("metric_id"), Op.LTE, max_metric_id),
- ]
- ),
- ]
- )
-
- conditions.append(condition)
-
- # This is added to every use case id because the MRI is the primary ORDER BY
- # on the table, and without it, these granules will be scanned no matter what
- # the use case id is.
- excluded_mris = []
-
- if use_case_id == UseCaseID.TRANSACTIONS:
- # This on_demand MRI takes up the majority of dataset and makes the query slow
- # because ClickHouse ends up scanning the whole table.
- #
- # These are used for on demand metrics extraction and end users should not
- # need to know about these metrics.
- #
- # As an optimization, we explicitly exclude these MRIs in the query to allow
- # Clickhouse to skip the granules containing strictly these MRIs.
- if entity_key == EntityKey.GenericMetricsCounters:
- excluded_mris.append("c:transactions/on_demand@none")
- elif entity_key == EntityKey.GenericMetricsDistributions:
- excluded_mris.append("d:transactions/on_demand@none")
- elif entity_key == EntityKey.GenericMetricsSets:
- excluded_mris.append("s:transactions/on_demand@none")
- elif entity_key == EntityKey.GenericMetricsGauges:
- excluded_mris.append("g:transactions/on_demand@none")
-
- if excluded_mris:
- conditions.append(
- Condition(
- Column("metric_id"),
- Op.NOT_IN,
- # these are shared strings, so just using org id 0 as a placeholder
- [indexer.resolve(use_case_id, 0, mri) for mri in excluded_mris],
- )
- )
-
- return conditions
-
-
-def _get_metrics_by_project_for_entity(
- entity_key: EntityKey,
- project_ids: Sequence[int],
- org_id: int,
- use_case_id: UseCaseID,
- start: datetime | None = None,
- end: datetime | None = None,
-) -> list[SnubaDataType]:
- return run_metrics_query(
- entity_key=entity_key,
- select=[Column("project_id"), Column("metric_id")],
- groupby=[Column("project_id"), Column("metric_id")],
- where=[Condition(Column("use_case_id"), Op.EQ, use_case_id.value)],
- referrer="snuba.metrics.get_metrics_names_for_entity",
- project_ids=project_ids,
- org_id=org_id,
- use_case_id=use_case_id,
- start=start,
- end=end,
- )
-
-
def get_available_derived_metrics(
projects: Sequence[Project],
supported_metric_ids_in_entities: dict[MetricType, Sequence[int]],
@@ -300,68 +168,6 @@ def get_metrics_blocking_state_of_projects(
return metrics_blocking_state_by_mri
-def get_stored_metrics_of_projects(
- projects: Sequence[Project],
- use_case_ids: Sequence[UseCaseID],
- start: datetime | None = None,
- end: datetime | None = None,
-) -> Mapping[str, Sequence[int]]:
- org_id = projects[0].organization_id
- project_ids = [project.id for project in projects]
-
- # We compute a list of all the queries that we want to run in parallel across entities and use cases.
- requests = []
- use_case_id_to_index = defaultdict(list)
- for use_case_id in use_case_ids:
- entity_keys = get_entity_keys_of_use_case_id(use_case_id=use_case_id)
- for entity_key in entity_keys or ():
- requests.append(
- _get_metrics_by_project_for_entity_query(
- entity_key=entity_key,
- project_ids=project_ids,
- org_id=org_id,
- use_case_id=use_case_id,
- start=start,
- end=end,
- )
- )
- use_case_id_to_index[use_case_id].append(len(requests) - 1)
-
- # We run the queries all in parallel.
- results = bulk_snuba_queries(
- requests=requests,
- referrer="snuba.metrics.datasource.get_stored_metrics_of_projects",
- use_cache=True,
- )
-
- # We reverse resolve all the metric ids by bulking together all the resolutions of the same use case id to maximize
- # the parallelism.
- resolved_metric_ids = defaultdict(dict)
- for use_case_id, results_indexes in use_case_id_to_index.items():
- metrics_ids = []
- for result_index in results_indexes:
- data = results[result_index]["data"]
- for row in data or ():
- metrics_ids.append(row["metric_id"])
-
- # We have to partition the resolved metric ids per use case id, since the indexer values might clash across
- # use cases.
- resolved_metric_ids[use_case_id].update(
- bulk_reverse_resolve(use_case_id, org_id, [metric_id for metric_id in metrics_ids])
- )
-
- # We iterate over each result and compute a map of `metric_id -> project_id`.
- grouped_stored_metrics = defaultdict(list)
- for use_case_id, results_indexes in use_case_id_to_index.items():
- for result_index in results_indexes:
- data = results[result_index]["data"]
- for row in data or ():
- resolved_metric_id = resolved_metric_ids[use_case_id][row["metric_id"]]
- grouped_stored_metrics[resolved_metric_id].append(row["project_id"])
-
- return grouped_stored_metrics
-
-
def get_custom_measurements(
project_ids: Sequence[int],
organization_id: int,
diff --git a/src/sentry/snuba/metrics/extraction.py b/src/sentry/snuba/metrics/extraction.py
index 99b0fc625cf705..a838ca7cc8a6a9 100644
--- a/src/sentry/snuba/metrics/extraction.py
+++ b/src/sentry/snuba/metrics/extraction.py
@@ -313,8 +313,6 @@ def get_default_spec_version(cls: Any) -> SpecVersion:
"event.type=transaction",
]
-Variables = dict[str, Any]
-
query_builder = UnresolvedQuery(
dataset=Dataset.Transactions, params={}
) # Workaround to get all updated discover functions instead of using the deprecated events fields.
@@ -1326,11 +1324,6 @@ def condition(self) -> RuleCondition | None:
is extracted."""
return self._process_query()
- def is_project_dependent(self) -> bool:
- """Returns whether the spec is unique to a project, which is required for some forms of caching"""
- tags_specs_generator = _ONDEMAND_OP_TO_PROJECT_SPEC_GENERATOR.get(self.op)
- return tags_specs_generator is not None
-
def tags_conditions(self, project: Project) -> list[TagSpec]:
"""Returns a list of tag conditions that will specify how tags are injected into metrics by Relay, and a bool if those specs may be project specific."""
tags_specs_generator = _ONDEMAND_OP_TO_SPEC_GENERATOR.get(self.op)
diff --git a/src/sentry/snuba/metrics/fields/snql.py b/src/sentry/snuba/metrics/fields/snql.py
index c73af996b7a1ac..85925024144292 100644
--- a/src/sentry/snuba/metrics/fields/snql.py
+++ b/src/sentry/snuba/metrics/fields/snql.py
@@ -225,14 +225,6 @@ def _snql_on_tx_satisfaction_factory(
return _snql_on_tx_satisfaction_factory
-def _dist_count_aggregation_on_tx_satisfaction_factory(
- org_id: int, satisfaction: str, metric_ids: Sequence[int], alias: str | None = None
-) -> Function:
- return _aggregation_on_tx_satisfaction_func_factory("countIf")(
- org_id, satisfaction, metric_ids, alias
- )
-
-
def _set_count_aggregation_on_tx_satisfaction_factory(
org_id: int, satisfaction: str, metric_ids: Sequence[int], alias: str | None = None
) -> Function:
@@ -783,16 +775,12 @@ def team_key_transaction_snql(
def _resolve_project_threshold_config(project_ids: Sequence[int], org_id: int) -> SelectType:
+ use_case_id = UseCaseID.TRANSACTIONS
return resolve_project_threshold_config(
- tag_value_resolver=lambda use_case_id, org_id, value: resolve_tag_value(
- use_case_id, org_id, value
- ),
- column_name_resolver=lambda use_case_id, org_id, value: resolve_tag_key(
- use_case_id, org_id, value
- ),
+ tag_value_resolver=lambda org_id, value: resolve_tag_value(use_case_id, org_id, value),
+ column_name_resolver=lambda org_id, value: resolve_tag_key(use_case_id, org_id, value),
project_ids=project_ids,
org_id=org_id,
- use_case_id=UseCaseID.TRANSACTIONS,
)
diff --git a/src/sentry/snuba/metrics/naming_layer/mapping.py b/src/sentry/snuba/metrics/naming_layer/mapping.py
index ae3cb23455d651..5e3cbf86cd0155 100644
--- a/src/sentry/snuba/metrics/naming_layer/mapping.py
+++ b/src/sentry/snuba/metrics/naming_layer/mapping.py
@@ -92,12 +92,6 @@ def get_public_name_from_mri(internal_name: TransactionMRI | SessionMRI | str) -
return internal_name
-def is_private_mri(internal_name: TransactionMRI | SessionMRI | str) -> bool:
- public_name = get_public_name_from_mri(internal_name)
- # If the public name is the same as internal name it means that the internal is "private".
- return public_name == internal_name
-
-
def _extract_name_from_custom_metric_mri(mri: str) -> str | None:
parsed_mri = parse_mri(mri)
if parsed_mri is None:
diff --git a/src/sentry/snuba/metrics/naming_layer/mri.py b/src/sentry/snuba/metrics/naming_layer/mri.py
index 49d7070d882361..284a1e34d3daa3 100644
--- a/src/sentry/snuba/metrics/naming_layer/mri.py
+++ b/src/sentry/snuba/metrics/naming_layer/mri.py
@@ -33,7 +33,6 @@
)
import re
-from collections.abc import Sequence
from dataclasses import dataclass
from enum import Enum
from typing import cast
@@ -42,12 +41,12 @@
from sentry.exceptions import InvalidParams
from sentry.sentry_metrics.use_case_id_registry import UseCaseID
-from sentry.snuba.dataset import EntityKey
from sentry.snuba.metrics.units import format_value_using_unit_and_op
from sentry.snuba.metrics.utils import (
AVAILABLE_GENERIC_OPERATIONS,
AVAILABLE_OPERATIONS,
OP_REGEX,
+ MetricEntity,
MetricOperationType,
MetricUnit,
)
@@ -335,28 +334,27 @@ def is_custom_measurement(parsed_mri: ParsedMRI) -> bool:
)
-def get_entity_key_from_entity_type(entity_type: str, generic_metrics: bool) -> EntityKey:
- entity_name_suffixes = {
- "c": "counters",
- "s": "sets",
- "d": "distributions",
- "g": "gauges",
- }
-
- if generic_metrics:
- return EntityKey(f"generic_metrics_{entity_name_suffixes[entity_type]}")
- else:
- return EntityKey(f"metrics_{entity_name_suffixes[entity_type]}")
+_ENTITY_KEY_MAPPING_GENERIC: dict[str, MetricEntity] = {
+ "c": "generic_metrics_counters",
+ "s": "generic_metrics_sets",
+ "d": "generic_metrics_distributions",
+ "g": "generic_metrics_gauges",
+}
+_ENTITY_KEY_MAPPING_NON_GENERIC: dict[str, MetricEntity] = {
+ "c": "metrics_counters",
+ "s": "metrics_sets",
+ "d": "metrics_distributions",
+}
-def get_available_operations(parsed_mri: ParsedMRI) -> Sequence[str]:
+def get_available_operations(parsed_mri: ParsedMRI) -> list[MetricOperationType]:
if parsed_mri.entity == "e":
return []
elif parsed_mri.namespace == "sessions":
- entity_key = get_entity_key_from_entity_type(parsed_mri.entity, False).value
+ entity_key = _ENTITY_KEY_MAPPING_NON_GENERIC[parsed_mri.entity]
return AVAILABLE_OPERATIONS[entity_key]
else:
- entity_key = get_entity_key_from_entity_type(parsed_mri.entity, True).value
+ entity_key = _ENTITY_KEY_MAPPING_GENERIC[parsed_mri.entity]
return AVAILABLE_GENERIC_OPERATIONS[entity_key]
diff --git a/src/sentry/snuba/metrics/query_builder.py b/src/sentry/snuba/metrics/query_builder.py
index eac1874aa33497..b36ab5e349e5e4 100644
--- a/src/sentry/snuba/metrics/query_builder.py
+++ b/src/sentry/snuba/metrics/query_builder.py
@@ -45,7 +45,6 @@
resolve_tag_key,
resolve_tag_value,
resolve_weak,
- reverse_resolve,
reverse_resolve_tag_value,
)
from sentry.snuba.dataset import Dataset
@@ -615,11 +614,6 @@ def get_date_range(params: Mapping) -> tuple[datetime, datetime, int]:
return start, end, interval
-def parse_tag(use_case_id: UseCaseID, org_id: int, tag_string: str) -> str:
- tag_key = int(tag_string.replace("tags_raw[", "").replace("tags[", "").replace("]", ""))
- return reverse_resolve(use_case_id, org_id, tag_key)
-
-
def get_metric_object_from_metric_field(
metric_field: MetricField,
) -> MetricExpressionBase:
diff --git a/src/sentry/snuba/metrics/utils.py b/src/sentry/snuba/metrics/utils.py
index cef92af6e666c6..df12ad96eaf7cb 100644
--- a/src/sentry/snuba/metrics/utils.py
+++ b/src/sentry/snuba/metrics/utils.py
@@ -4,7 +4,7 @@
from abc import ABC
from collections.abc import Collection, Generator, Mapping, Sequence
from datetime import datetime, timedelta, timezone
-from typing import Literal, TypedDict, overload
+from typing import Literal, NotRequired, TypedDict, overload
from sentry.sentry_metrics.use_case_id_registry import UseCaseID
from sentry.snuba.dataset import EntityKey
@@ -141,7 +141,7 @@
"generic_metrics_gauges",
]
-OP_TO_SNUBA_FUNCTION = {
+OP_TO_SNUBA_FUNCTION: dict[MetricEntity, dict[MetricOperationType, str]] = {
"metrics_counters": {
"sum": "sumIf",
"min_timestamp": "minIf",
@@ -169,7 +169,7 @@
"max_timestamp": "maxIf",
},
}
-GENERIC_OP_TO_SNUBA_FUNCTION = {
+GENERIC_OP_TO_SNUBA_FUNCTION: dict[MetricEntity, dict[MetricOperationType, str]] = {
"generic_metrics_counters": OP_TO_SNUBA_FUNCTION["metrics_counters"],
"generic_metrics_distributions": OP_TO_SNUBA_FUNCTION["metrics_distributions"],
"generic_metrics_sets": OP_TO_SNUBA_FUNCTION["metrics_sets"],
@@ -275,9 +275,6 @@ def generate_operation_regex():
OPERATIONS_TO_ENTITY = {
op: entity for entity, operations in AVAILABLE_OPERATIONS.items() for op in operations
}
-GENERIC_OPERATIONS_TO_ENTITY = {
- op: entity for entity, operations in AVAILABLE_GENERIC_OPERATIONS.items() for op in operations
-}
METRIC_TYPE_TO_ENTITY: Mapping[MetricType, EntityKey] = {
"counter": EntityKey.MetricsCounters,
@@ -342,6 +339,7 @@ class MetricMeta(TypedDict):
type: MetricType
operations: Collection[MetricOperationType]
unit: MetricUnit | None
+ metric_id: NotRequired[int]
mri: str
projectIds: Sequence[int]
blockingStatus: Sequence[BlockedMetric] | None
diff --git a/src/sentry/snuba/metrics_enhanced_performance.py b/src/sentry/snuba/metrics_enhanced_performance.py
index 2273781467e894..0fcdd654a2f19c 100644
--- a/src/sentry/snuba/metrics_enhanced_performance.py
+++ b/src/sentry/snuba/metrics_enhanced_performance.py
@@ -19,6 +19,7 @@
from sentry.snuba.metrics_performance import timeseries_query as metrics_timeseries_query
from sentry.snuba.metrics_performance import top_events_timeseries as metrics_top_events_timeseries
from sentry.snuba.query_sources import QuerySource
+from sentry.snuba.types import DatasetQuery
from sentry.utils.snuba import SnubaTSResult
@@ -48,7 +49,7 @@ def query(
on_demand_metrics_type: MetricSpecType | None = None,
fallback_to_transactions: bool = False,
query_source: QuerySource | None = None,
-):
+) -> EventsResponse:
metrics_compatible = not equations
dataset_reason = discover.DEFAULT_DATASET_REASON
@@ -57,22 +58,22 @@ def query(
result = metrics_query(
selected_columns,
query,
- snuba_params,
- equations,
- orderby,
- offset,
- limit,
- referrer,
- auto_fields,
- auto_aggregations,
- use_aggregate_conditions,
- allow_metric_aggregates,
- conditions,
- functions_acl,
- transform_alias_to_input_format,
- has_metrics,
- use_metrics_layer,
- on_demand_metrics_enabled,
+ snuba_params=snuba_params,
+ equations=equations,
+ orderby=orderby,
+ offset=offset,
+ limit=limit,
+ referrer=referrer,
+ auto_fields=auto_fields,
+ auto_aggregations=auto_aggregations,
+ use_aggregate_conditions=use_aggregate_conditions,
+ allow_metric_aggregates=allow_metric_aggregates,
+ conditions=conditions,
+ functions_acl=functions_acl,
+ transform_alias_to_input_format=transform_alias_to_input_format,
+ has_metrics=has_metrics,
+ use_metrics_layer=use_metrics_layer,
+ on_demand_metrics_enabled=on_demand_metrics_enabled,
on_demand_metrics_type=on_demand_metrics_type,
query_source=query_source,
)
@@ -90,13 +91,13 @@ def query(
# Either metrics failed, or this isn't a query we can enhance with metrics
if not metrics_compatible:
- dataset: types.ModuleType = discover
+ dataset_query: DatasetQuery = discover.query
if fallback_to_transactions:
- dataset = transactions
+ dataset_query = transactions.query
sentry_sdk.set_tag("performance.dataset", "transactions")
else:
sentry_sdk.set_tag("performance.dataset", "discover")
- results = dataset.query(
+ results = dataset_query(
selected_columns,
query,
snuba_params=snuba_params,
@@ -120,7 +121,10 @@ def query(
return results
- return {}
+ return {
+ "data": [],
+ "meta": {"fields": {}},
+ }
def timeseries_query(
diff --git a/src/sentry/snuba/metrics_performance.py b/src/sentry/snuba/metrics_performance.py
index 68e00c52bfccf8..2acd67232bc7bd 100644
--- a/src/sentry/snuba/metrics_performance.py
+++ b/src/sentry/snuba/metrics_performance.py
@@ -6,7 +6,7 @@
from typing import Any, Literal, overload
import sentry_sdk
-from snuba_sdk import Column
+from snuba_sdk import Column, Condition
from sentry.discover.arithmetic import categorize_columns
from sentry.exceptions import IncompatibleMetricsQuery
@@ -30,29 +30,32 @@
def query(
- selected_columns,
- query,
- snuba_params=None,
- equations=None,
- orderby=None,
- offset=None,
- limit=50,
- referrer=None,
- auto_fields=False,
- auto_aggregations=False,
- use_aggregate_conditions=False,
- allow_metric_aggregates=True,
- conditions=None,
- functions_acl=None,
- transform_alias_to_input_format=False,
+ selected_columns: list[str],
+ query: str,
+ snuba_params: SnubaParams,
+ equations: list[str] | None = None,
+ orderby: list[str] | None = None,
+ offset: int | None = None,
+ limit: int = 50,
+ referrer: str | None = None,
+ auto_fields: bool = False,
+ auto_aggregations: bool = False,
+ include_equation_fields: bool = False,
+ allow_metric_aggregates: bool = True,
+ use_aggregate_conditions: bool = False,
+ conditions: list[Condition] | None = None,
+ functions_acl: list[str] | None = None,
+ transform_alias_to_input_format: bool = False,
+ sample: float | None = None,
has_metrics: bool = True,
use_metrics_layer: bool = False,
+ skip_tag_resolution: bool = False,
on_demand_metrics_enabled: bool = False,
on_demand_metrics_type: MetricSpecType | None = None,
granularity: int | None = None,
fallback_to_transactions=False,
query_source: QuerySource | None = None,
-):
+) -> EventsResponse:
with sentry_sdk.start_span(op="mep", name="MetricQueryBuilder"):
metrics_query = MetricsQueryBuilder(
dataset=Dataset.PerformanceMetrics,
@@ -78,6 +81,8 @@ def query(
on_demand_metrics_type=on_demand_metrics_type,
),
)
+ if referrer is None:
+ referrer = ""
metrics_referrer = referrer + ".metrics-enhanced"
results = metrics_query.run_query(referrer=metrics_referrer, query_source=query_source)
with sentry_sdk.start_span(op="mep", name="query.transform_results"):
diff --git a/src/sentry/snuba/models.py b/src/sentry/snuba/models.py
index 75c84f93c39375..6fb83b3478d771 100644
--- a/src/sentry/snuba/models.py
+++ b/src/sentry/snuba/models.py
@@ -12,6 +12,7 @@
from sentry.backup.scopes import ImportScope, RelocationScope
from sentry.db.models import FlexibleForeignKey, Model, region_silo_model
from sentry.db.models.manager.base import BaseManager
+from sentry.incidents.utils.types import DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION
from sentry.models.team import Team
from sentry.users.models.user import User
from sentry.workflow_engine.registry import data_source_type_registry
@@ -153,7 +154,7 @@ def write_relocation_import(
return (subscription.pk, ImportKind.Inserted)
-@data_source_type_registry.register("snuba_query_subscription")
+@data_source_type_registry.register(DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION)
class QuerySubscriptionDataSourceHandler(DataSourceTypeHandler[QuerySubscription]):
@staticmethod
def bulk_get_query_object(
diff --git a/src/sentry/snuba/referrer.py b/src/sentry/snuba/referrer.py
index 667d39232b72dc..3a740223d27aae 100644
--- a/src/sentry/snuba/referrer.py
+++ b/src/sentry/snuba/referrer.py
@@ -1,7 +1,7 @@
from __future__ import annotations
import logging
-from enum import Enum, unique
+from enum import StrEnum, unique
from sentry.utils import metrics
@@ -9,7 +9,7 @@
@unique
-class Referrer(Enum):
+class Referrer(StrEnum):
ALERTRULESERIALIZER_TEST_QUERY_PRIMARY = "alertruleserializer.test_query.primary"
ALERTRULESERIALIZER_TEST_QUERY = "alertruleserializer.test_query"
ANOMALY_DETECTION_HISTORICAL_DATA_QUERY = "anomaly_detection_historical_data_query"
@@ -107,6 +107,7 @@ class Referrer(Enum):
)
API_GROUP_HASHES_LEVELS_GET_LEVELS_OVERVIEW = "api.group_hashes_levels.get_levels_overview"
API_GROUP_HASHES = "api.group-hashes"
+ API_INSIGHTS_USER_GEO_SUBREGION_SELECTOR = "api.insights.user-geo-subregion-selector"
API_ISSUES_ISSUE_EVENTS = "api.issues.issue_events"
API_ISSUES_RELATED_ISSUES = "api.issues.related_issues"
API_METRICS_TOTALS = "api.metrics.totals"
@@ -529,32 +530,32 @@ class Referrer(Enum):
# Performance Cache Module
API_PERFORMANCE_CACHE_LANDING_CACHE_THROUGHPUT_CHART = (
- "api.performance.cache.landing-cache-throughput-chart",
+ "api.performance.cache.landing-cache-throughput-chart"
)
API_PERFORMANCE_CACHE_LANDING_CACHE_TRANSACTION_LIST = (
- "api.performance.cache.landing-cache-transaction-list",
+ "api.performance.cache.landing-cache-transaction-list"
)
API_PERFORMANCE_CACHE_LANDING_CACHE_TRANSACTION_DURATION = (
- "api.performance.cache.landing-cache-transaction-duration",
+ "api.performance.cache.landing-cache-transaction-duration"
)
API_PERFORMANCE_CACHE_SAMPLES_CACHE_METRICS_RIBBON = (
- "api.performance.cache.samples-cache-metrics-ribbon",
+ "api.performance.cache.samples-cache-metrics-ribbon"
)
API_PERFORMANCE_CACHE_SAMPLES_CACHE_TRANSACTION_DURATION_CHART = (
- "api.performance.cache.samples-cache-transaction-duration-chart",
+ "api.performance.cache.samples-cache-transaction-duration-chart"
)
API_PERFORMANCE_CACHE_SAMPLES_CACHE_TRANSACTION_DURATION = (
- "api.performance.cache.samples-cache-transaction-duration",
+ "api.performance.cache.samples-cache-transaction-duration"
)
API_PERFORMANCE_CACHE_SAMPLES_CACHE_SPAN_SAMPLES = (
- "api.performance.cache.samples-cache-span-samples",
+ "api.performance.cache.samples-cache-span-samples"
)
API_PERFORMANCE_CACHE_SAMPLES_CACHE_SPAN_SAMPLES_TRANSACTION_DURATION = (
- "api.performance.cache.samples-cache-span-samples-transaction-duration",
+ "api.performance.cache.samples-cache-span-samples-transaction-duration"
)
API_PERFORMANCE_CACHE_SAMPLES_CACHE_HIT_MISS_CHART = (
- "api.performance.cache.samples-cache-hit-miss-chart",
+ "api.performance.cache.samples-cache-hit-miss-chart"
)
# Performance Queues Module
@@ -716,6 +717,8 @@ class Referrer(Enum):
INCIDENTS_GET_INCIDENT_AGGREGATES_PRIMARY = "incidents.get_incident_aggregates.primary"
INCIDENTS_GET_INCIDENT_AGGREGATES = "incidents.get_incident_aggregates"
IS_ESCALATING_GROUP = "sentry.issues.escalating.is_escalating"
+ ISSUE_DETAILS_STREAMLINE_GRAPH = "issue_details.streamline_graph"
+ ISSUE_DETAILS_STREAMLINE_LIST = "issue_details.streamline_list"
METRIC_EXTRACTION_CARDINALITY_CHECK = "metric_extraction.cardinality_check"
OUTCOMES_TIMESERIES = "outcomes.timeseries"
OUTCOMES_TOTALS = "outcomes.totals"
diff --git a/src/sentry/snuba/sessions_v2.py b/src/sentry/snuba/sessions_v2.py
index 184b3a479e96de..3d83dfa2a615f8 100644
--- a/src/sentry/snuba/sessions_v2.py
+++ b/src/sentry/snuba/sessions_v2.py
@@ -4,7 +4,7 @@
import logging
import math
from datetime import datetime, timedelta, timezone
-from typing import Any
+from typing import Any, NotRequired, Protocol, TypedDict
from snuba_sdk import BooleanCondition, Column, Condition, Function, Limit, Op
@@ -101,6 +101,11 @@
"""
class _Field(Protocol):
    """Structural type of the field objects stored in COLUMN_MAP
    (SessionsField, UsersField, DurationAverageField, ...)."""

    def extract_from_row(self, row, group) -> float | None: ...
    def get_snuba_columns(self, raw_groupby) -> list[str]: ...
+
class SessionsField:
def get_snuba_columns(self, raw_groupby):
if "session.status" in raw_groupby:
@@ -188,7 +193,7 @@ def extract_from_row(self, row, group):
return None
-COLUMN_MAP = {
+COLUMN_MAP: dict[str, _Field] = {
"sum(session)": SessionsField(),
"count_unique(user)": UsersField(),
"avg(session.duration)": DurationAverageField(),
@@ -201,6 +206,12 @@ def extract_from_row(self, row, group):
}
class _GroupBy(Protocol):
    """Structural type of the group-by objects stored in GROUPBY_MAP
    (e.g. SimpleGroupBy)."""

    def get_snuba_columns(self) -> list[str]: ...
    def get_snuba_groupby(self) -> list[str]: ...
    # Returns (column, value) pairs identifying the group(s) a row falls into.
    def get_keys_for_row(self, row) -> list[tuple[str, str]]: ...
+
class SimpleGroupBy:
def __init__(self, row_name: str, name: str | None = None):
self.row_name = row_name
@@ -229,7 +240,7 @@ def get_keys_for_row(self, row):
# NOTE: in the future we might add new `user_agent` and `os` fields
-GROUPBY_MAP = {
+GROUPBY_MAP: dict[str, _GroupBy] = {
"project": SimpleGroupBy("project_id", "project"),
"environment": SimpleGroupBy("environment"),
"release": SimpleGroupBy("release"),
@@ -423,8 +434,8 @@ def get_constrained_date_range(
max_points=MAX_POINTS,
restrict_date_range=True,
) -> tuple[datetime, datetime, int]:
- interval = parse_stats_period(params.get("interval", "1h"))
- interval = int(3600 if interval is None else interval.total_seconds())
+ interval_td = parse_stats_period(params.get("interval", "1h"))
+ interval = int(3600 if interval_td is None else interval_td.total_seconds())
smallest_interval, interval_str = allowed_resolution.value
if interval % smallest_interval != 0 or interval < smallest_interval:
@@ -569,7 +580,8 @@ def make_timeseries(rows, group):
row[ts_col] = row[ts_col][:19] + "Z"
rows.sort(key=lambda row: row[ts_col])
- fields = [(name, field, list()) for name, field in query.fields.items()]
+ fields: list[tuple[str, _Field, list[float | None]]]
+ fields = [(name, field, []) for name, field in query.fields.items()]
group_index = 0
while group_index < len(rows):
@@ -618,9 +630,16 @@ def make_totals(totals, group):
}
class _CategoryStats(TypedDict):
    # Data category this stats entry describes.
    category: str
    # Per-outcome counts for the category.
    outcomes: dict[str, int]
    # Aggregated totals across the category's outcomes.
    totals: dict[str, int]
    # Optional: only present for outcomes that carry a reason.
    reason: NotRequired[str]
+
def massage_sessions_result_summary(
query, result_totals, outcome_query=None
-) -> dict[str, list[Any]]:
+) -> tuple[dict[int, dict[str, dict[str, _CategoryStats]]], dict[str, list[Any]]]:
"""
Post-processes the query result.
@@ -667,8 +686,8 @@ def make_totals(totals, group):
}
def get_category_stats(
- reason, totals, outcome, category, category_stats: dict[str, int] | None = None
- ):
+ reason, totals, outcome, category, category_stats: _CategoryStats | None = None
+ ) -> _CategoryStats:
if not category_stats:
category_stats = {
"category": category,
@@ -697,7 +716,7 @@ def get_category_stats(
return category_stats
keys = set(total_groups.keys())
- projects = {}
+ projects: dict[int, dict[str, dict[str, _CategoryStats]]] = {}
for key in keys:
by = dict(key)
@@ -708,8 +727,7 @@ def get_category_stats(
totals = make_totals(total_groups.get(key, [None]), by)
- if project_id not in projects:
- projects[project_id] = {"categories": {}}
+ projects.setdefault(project_id, {"categories": {}})
if category in projects[project_id]["categories"]:
# update stats dict for category
@@ -763,18 +781,16 @@ def get_timestamps(query):
def _split_rows_groupby(rows, groupby):
- groups = {}
+ groups: dict[frozenset[str], list[object]] = {}
if rows is None:
return groups
for row in rows:
key_parts = (group.get_keys_for_row(row) for group in groupby)
keys = itertools.product(*key_parts)
- for key in keys:
- key = frozenset(key)
+ for key_tup in keys:
+ key = frozenset(key_tup)
- if key not in groups:
- groups[key] = []
- groups[key].append(row)
+ groups.setdefault(key, []).append(row)
return groups
diff --git a/src/sentry/snuba/spans_eap.py b/src/sentry/snuba/spans_eap.py
index 5356f2fd29f64e..35ebede9dc6d7e 100644
--- a/src/sentry/snuba/spans_eap.py
+++ b/src/sentry/snuba/spans_eap.py
@@ -1,6 +1,7 @@
import logging
from collections.abc import Mapping, Sequence
from datetime import timedelta
+from typing import Any, TypedDict
import sentry_sdk
from snuba_sdk import Column, Condition
@@ -49,8 +50,7 @@ def query(
dataset: Dataset = Dataset.Discover,
fallback_to_transactions: bool = False,
query_source: QuerySource | None = None,
- enable_rpc: bool | None = False,
-):
+) -> EventsResponse:
builder = SpansEAPQueryBuilder(
Dataset.EventsAnalyticsPlatform,
{},
@@ -244,14 +244,16 @@ def top_events_timeseries(
snuba_params.end_date,
rollup,
)
+
with sentry_sdk.start_span(op="spans_indexed", name="top_events.transform_results") as span:
span.set_data("result_count", len(result.get("data", [])))
result = top_events_builder.process_results(result)
+ other_result = top_events_builder.process_results(other_result)
issues: Mapping[int, str | None] = {}
translated_groupby = top_events_builder.translated_groupby
- results = (
+ results: dict[str, TimeseriesResult] = (
{discover.OTHER_KEY: {"order": limit, "data": other_result["data"]}}
if len(other_result.get("data", []))
else {}
@@ -292,3 +294,8 @@ def top_events_timeseries(
)
return top_events_results
+
+
class TimeseriesResult(TypedDict):
    # Ordering index of the top event this series belongs to
    # (the OTHER_KEY bucket uses `limit` as its order).
    order: int
    # Timeseries rows for this event group.
    data: list[dict[str, Any]]
diff --git a/src/sentry/snuba/spans_indexed.py b/src/sentry/snuba/spans_indexed.py
index 602e0e5c256b83..2ef001caecbd67 100644
--- a/src/sentry/snuba/spans_indexed.py
+++ b/src/sentry/snuba/spans_indexed.py
@@ -101,7 +101,7 @@ def timeseries_query(
equations, columns = categorize_columns(selected_columns)
with sentry_sdk.start_span(op="spans_indexed", name="TimeseriesSpanIndexedQueryBuilder"):
- query = TimeseriesSpanIndexedQueryBuilder(
+ query_obj = TimeseriesSpanIndexedQueryBuilder(
Dataset.SpansIndexed,
{},
rollup,
@@ -113,16 +113,16 @@ def timeseries_query(
transform_alias_to_input_format=transform_alias_to_input_format,
),
)
- result = query.run_query(referrer, query_source=query_source)
+ result = query_obj.run_query(referrer, query_source=query_source)
with sentry_sdk.start_span(op="spans_indexed", name="query.transform_results"):
- result = query.process_results(result)
+ result = query_obj.process_results(result)
result["data"] = (
discover.zerofill(
result["data"],
snuba_params.start_date,
snuba_params.end_date,
rollup,
- "time",
+ ["time"],
)
if zerofill_results
else result["data"]
@@ -229,7 +229,7 @@ def top_events_timeseries(
{
"data": (
discover.zerofill(
- [], snuba_params.start_date, snuba_params.end_date, rollup, "time"
+ [], snuba_params.start_date, snuba_params.end_date, rollup, ["time"]
)
if zerofill_results
else []
@@ -243,7 +243,7 @@ def top_events_timeseries(
span.set_data("result_count", len(result.get("data", [])))
result = top_events_builder.process_results(result)
- issues = {}
+ issues: dict[int, str | None] = {}
translated_groupby = top_events_builder.translated_groupby
results = (
@@ -264,8 +264,9 @@ def top_events_timeseries(
"spans_indexed.top-events.timeseries.key-mismatch",
extra={"result_key": result_key, "top_event_keys": list(results.keys())},
)
- for key, item in results.items():
- results[key] = SnubaTSResult(
+
+ return {
+ key: SnubaTSResult(
{
"data": (
discover.zerofill(
@@ -273,7 +274,7 @@ def top_events_timeseries(
snuba_params.start_date,
snuba_params.end_date,
rollup,
- "time",
+ ["time"],
)
if zerofill_results
else item["data"]
@@ -284,5 +285,5 @@ def top_events_timeseries(
snuba_params.end_date,
rollup,
)
-
- return results
+ for key, item in results.items()
+ }
diff --git a/src/sentry/snuba/spans_rpc.py b/src/sentry/snuba/spans_rpc.py
index 930aee26eda83c..9b56b742b8c78e 100644
--- a/src/sentry/snuba/spans_rpc.py
+++ b/src/sentry/snuba/spans_rpc.py
@@ -1,7 +1,9 @@
import logging
+from collections import defaultdict
from datetime import timedelta
from typing import Any
+import sentry_sdk
from sentry_protos.snuba.v1.endpoint_time_series_pb2 import TimeSeries, TimeSeriesRequest
from sentry_protos.snuba.v1.endpoint_trace_item_table_pb2 import Column, TraceItemTableRequest
from sentry_protos.snuba.v1.trace_item_attribute_pb2 import AttributeAggregation, AttributeKey
@@ -29,6 +31,7 @@ def categorize_column(column: ResolvedColumn | ResolvedFunction) -> Column:
return Column(key=column.proto_definition, label=column.public_alias)
+@sentry_sdk.trace
def run_table_query(
params: SnubaParams,
query_string: str,
@@ -45,8 +48,9 @@ def run_table_query(
SearchResolver(params=params, config=config) if search_resolver is None else search_resolver
)
meta = resolver.resolve_meta(referrer=referrer)
- query = resolver.resolve_query(query_string)
- columns, contexts = resolver.resolve_columns(selected_columns)
+ query, query_contexts = resolver.resolve_query(query_string)
+ columns, column_contexts = resolver.resolve_columns(selected_columns)
+ contexts = resolver.clean_contexts(query_contexts + column_contexts)
# We allow orderby function_aliases if they're a selected_column
# eg. can orderby sum_span_self_time, assuming sum(span.self_time) is selected
orderby_aliases = {
@@ -153,7 +157,7 @@ def get_timeseries_query(
) -> TimeSeriesRequest:
resolver = SearchResolver(params=params, config=config)
meta = resolver.resolve_meta(referrer=referrer)
- query = resolver.resolve_query(query_string)
+ query, query_contexts = resolver.resolve_query(query_string)
(aggregations, _) = resolver.resolve_aggregates(y_axes)
(groupbys, _) = resolver.resolve_columns(groupby)
if extra_conditions is not None:
@@ -176,6 +180,8 @@ def get_timeseries_query(
if isinstance(groupby.proto_definition, AttributeKey)
],
granularity_secs=granularity_secs,
+ # TODO: need to add this once the RPC supports it
+ # virtual_column_contexts=[context for context in resolver.clean_contexts(query_contexts) if context is not None],
)
@@ -195,6 +201,7 @@ def validate_granularity(
)
+@sentry_sdk.trace
def run_timeseries_query(
params: SnubaParams,
query_string: str,
@@ -217,7 +224,7 @@ def run_timeseries_query(
result: SnubaData = []
confidences: SnubaData = []
for timeseries in rpc_response.result_timeseries:
- processed, confidence = _process_timeseries(timeseries, params, granularity_secs)
+ processed, confidence = _process_all_timeseries([timeseries], params, granularity_secs)
if len(result) == 0:
result = processed
confidences = confidence
@@ -253,7 +260,7 @@ def run_timeseries_query(
if comp_rpc_response.result_timeseries:
timeseries = comp_rpc_response.result_timeseries[0]
- processed, _ = _process_timeseries(timeseries, params, granularity_secs)
+ processed, _ = _process_all_timeseries([timeseries], params, granularity_secs)
label = get_function_alias(timeseries.label)
for existing, new in zip(result, processed):
existing["comparisonCount"] = new[label]
@@ -266,6 +273,7 @@ def run_timeseries_query(
)
+@sentry_sdk.trace
def build_top_event_conditions(
resolver: SearchResolver, top_events: EAPResponse, groupby_columns: list[str]
) -> Any:
@@ -281,7 +289,7 @@ def build_top_event_conditions(
]
else:
value = event[key]
- resolved_term = resolver.resolve_term(
+ resolved_term, context = resolver.resolve_term(
SearchFilter(
key=SearchKey(name=key),
operator="=",
@@ -290,7 +298,7 @@ def build_top_event_conditions(
)
if resolved_term is not None:
row_conditions.append(resolved_term)
- other_term = resolver.resolve_term(
+ other_term, context = resolver.resolve_term(
SearchFilter(
key=SearchKey(name=key),
operator="!=",
@@ -373,7 +381,7 @@ def run_top_events_timeseries_query(
other_response = snuba_rpc.timeseries_rpc(other_request)
"""Process the results"""
- map_result_key_to_timeseries = {}
+ map_result_key_to_timeseries = defaultdict(list)
for timeseries in rpc_response.result_timeseries:
groupby_attributes = timeseries.group_by_attributes
remapped_groupby = {}
@@ -388,12 +396,12 @@ def run_top_events_timeseries_query(
resolved_groupby, _ = search_resolver.resolve_attribute(col)
remapped_groupby[col] = groupby_attributes[resolved_groupby.internal_name]
result_key = create_result_key(remapped_groupby, groupby_columns, {})
- map_result_key_to_timeseries[result_key] = timeseries
+ map_result_key_to_timeseries[result_key].append(timeseries)
final_result = {}
# Top Events actually has the order, so we need to iterate through it, regenerate the result keys
for index, row in enumerate(top_events["data"]):
result_key = create_result_key(row, groupby_columns, {})
- result_data, result_confidence = _process_timeseries(
+ result_data, result_confidence = _process_all_timeseries(
map_result_key_to_timeseries[result_key],
params,
granularity_secs,
@@ -409,8 +417,8 @@ def run_top_events_timeseries_query(
granularity_secs,
)
if other_response.result_timeseries:
- result_data, result_confidence = _process_timeseries(
- other_response.result_timeseries[0],
+ result_data, result_confidence = _process_all_timeseries(
+ [timeseries for timeseries in other_response.result_timeseries],
params,
granularity_secs,
)
@@ -427,19 +435,29 @@ def run_top_events_timeseries_query(
return final_result
def _process_all_timeseries(
    all_timeseries: list[TimeSeries],
    params: SnubaParams,
    granularity_secs: int,
    order: int | None = None,
) -> tuple[SnubaData, SnubaData]:
    """Merge RPC timeseries into parallel lists of value rows and confidence rows.

    The first series defines the time axis; every subsequent series must line
    up bucket-for-bucket with it. Returns ``(rows, confidences)`` where each
    row is keyed by "time" plus one function alias per series.
    """
    merged: SnubaData = []
    merged_confidence: SnubaData = []

    for series in all_timeseries:
        # Timeseries serialization expects the function alias
        # (eg. `count` not `count()`)
        alias = get_function_alias(series.label)

        if not merged:
            # First series seen: its buckets establish the time axis.
            for bucket in series.buckets:
                merged.append({"time": bucket.seconds})
                merged_confidence.append({"time": bucket.seconds})
        else:
            # Later series must share the exact same bucket timestamps.
            for i, bucket in enumerate(series.buckets):
                assert merged[i]["time"] == bucket.seconds
                assert merged_confidence[i]["time"] == bucket.seconds

        for i, point in enumerate(series.data_points):
            merged[i][alias] = process_value(point.data)
            merged_confidence[i][alias] = CONFIDENCES.get(point.reliability, None)

    return merged, merged_confidence
diff --git a/src/sentry/snuba/types.py b/src/sentry/snuba/types.py
new file mode 100644
index 00000000000000..ae6c599819ad0c
--- /dev/null
+++ b/src/sentry/snuba/types.py
@@ -0,0 +1,40 @@
+from typing import Protocol
+
+from snuba_sdk import Column, Condition
+
+from sentry.search.events.types import EventsResponse, SnubaParams
+from sentry.snuba.dataset import Dataset
+from sentry.snuba.metrics.extraction import MetricSpecType
+from sentry.snuba.query_sources import QuerySource
+
+
class DatasetQuery(Protocol):
    """Callable signature shared by the dataset-level ``query`` functions
    (e.g. ``discover.query`` / ``transactions.query``), so a caller can pick
    an implementation at runtime while staying type checked.
    """

    def __call__(
        self,
        selected_columns: list[str],
        query: str,
        snuba_params: SnubaParams,
        equations: list[str] | None = None,
        orderby: list[str] | None = None,
        offset: int | None = None,
        limit: int = 50,
        referrer: str | None = None,
        auto_fields: bool = False,
        auto_aggregations: bool = False,
        include_equation_fields: bool = False,
        allow_metric_aggregates: bool = False,
        use_aggregate_conditions: bool = False,
        conditions: list[Condition] | None = None,
        functions_acl: list[str] | None = None,
        transform_alias_to_input_format: bool = False,
        sample: float | None = None,
        has_metrics: bool = False,
        use_metrics_layer: bool = False,
        skip_tag_resolution: bool = False,
        extra_columns: list[Column] | None = None,
        on_demand_metrics_enabled: bool = False,
        on_demand_metrics_type: MetricSpecType | None = None,
        dataset: Dataset = Dataset.Discover,
        fallback_to_transactions: bool = False,
        query_source: QuerySource | None = None,
    ) -> EventsResponse: ...
diff --git a/src/sentry/statistical_detectors/detector.py b/src/sentry/statistical_detectors/detector.py
index 0b9540ddc5a293..c633d46c7beca0 100644
--- a/src/sentry/statistical_detectors/detector.py
+++ b/src/sentry/statistical_detectors/detector.py
@@ -62,6 +62,10 @@ class RegressionDetector(ABC):
resolution_rel_threshold: float
escalation_rel_threshold: float
    @classmethod
    @abstractmethod
    def min_throughput_threshold(cls) -> int:
        # Payloads whose event count is at or below this threshold are skipped
        # by detect_trends to minimize false positives.
        ...
+
@classmethod
def configure_tags(cls):
sentry_sdk.set_tag("regression.source", cls.source)
@@ -105,20 +109,28 @@ def detect_trends(
unique_project_ids: set[int] = set()
total_count = 0
+ skipped_count = 0
regressed_count = 0
improved_count = 0
algorithm = cls.detector_algorithm_factory()
store = cls.detector_store_factory()
- for payloads in chunked(cls.all_payloads(projects, start), batch_size):
- total_count += len(payloads)
+ for raw_payloads in chunked(cls.all_payloads(projects, start), batch_size):
+ total_count += len(raw_payloads)
- raw_states = store.bulk_read_states(payloads)
+ raw_states = store.bulk_read_states(raw_payloads)
+ payloads = []
states = []
- for raw_state, payload in zip(raw_states, payloads):
+ for raw_state, payload in zip(raw_states, raw_payloads):
+ # If the number of events is too low, then we skip updating
+ # to minimize false positives
+ if payload.count <= cls.min_throughput_threshold():
+ skipped_count += 1
+ continue
+
metrics.distribution(
"statistical_detectors.objects.throughput",
value=payload.count,
@@ -133,6 +145,7 @@ def detect_trends(
elif trend_type == TrendType.Improved:
improved_count += 1
+ payloads.append(payload)
states.append(None if new_state is None else new_state.to_redis_dict())
yield TrendBundle(
@@ -142,7 +155,8 @@ def detect_trends(
state=new_state,
)
- store.bulk_write_states(payloads, states)
+ if payloads and states:
+ store.bulk_write_states(payloads, states)
metrics.incr(
"statistical_detectors.projects.active",
@@ -158,6 +172,13 @@ def detect_trends(
sample_rate=1.0,
)
+ metrics.incr(
+ "statistical_detectors.objects.skipped",
+ amount=skipped_count,
+ tags={"source": cls.source, "kind": cls.kind},
+ sample_rate=1.0,
+ )
+
metrics.incr(
"statistical_detectors.objects.regressed",
amount=regressed_count,
diff --git a/src/sentry/tagstore/types.py b/src/sentry/tagstore/types.py
index 2ca77f628792ef..4c31ca71f5f0f0 100644
--- a/src/sentry/tagstore/types.py
+++ b/src/sentry/tagstore/types.py
@@ -137,7 +137,7 @@ class TagValueSerializerResponse(TagValueSerializerResponseOptional):
@register(GroupTagValue)
@register(TagValue)
class TagValueSerializer(Serializer):
- def serialize(self, obj, attrs, user, **kwargs):
+ def serialize(self, obj, attrs, user, **kwargs) -> TagValueSerializerResponse:
from sentry import tagstore
key = tagstore.get_standardized_key(obj.key)
diff --git a/src/sentry/tasks/base.py b/src/sentry/tasks/base.py
index 3b703d27fd9fb5..3a2c404b4a7150 100644
--- a/src/sentry/tasks/base.py
+++ b/src/sentry/tasks/base.py
@@ -1,9 +1,7 @@
from __future__ import annotations
import logging
-import resource
from collections.abc import Callable, Iterable
-from contextlib import contextmanager
from datetime import datetime
from functools import wraps
from typing import Any, TypeVar
@@ -15,6 +13,7 @@
from sentry.celery import app
from sentry.silo.base import SiloLimit, SiloMode
from sentry.utils import metrics
+from sentry.utils.memory import track_memory_usage
from sentry.utils.sdk import Scope, capture_exception
ModelT = TypeVar("ModelT", bound=Model)
@@ -59,19 +58,6 @@ def __call__(self, decorated_task: Any) -> Any:
return limited_func
-def get_rss_usage():
- return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
-
-
-@contextmanager
-def track_memory_usage(metric, **kwargs):
- before = get_rss_usage()
- try:
- yield
- finally:
- metrics.distribution(metric, get_rss_usage() - before, unit="byte", **kwargs)
-
-
def load_model_from_db(
tp: type[ModelT], instance_or_id: ModelT | int, allow_cache: bool = True
) -> ModelT:
diff --git a/src/sentry/tasks/commit_context.py b/src/sentry/tasks/commit_context.py
index f017618e8199f5..75a8dc0de2084b 100644
--- a/src/sentry/tasks/commit_context.py
+++ b/src/sentry/tasks/commit_context.py
@@ -231,6 +231,9 @@ def process_commit_context(
project_id=project.id,
group_id=group_id,
new_assignment=created,
+ user_id=group_owner.user_id,
+ group_owner_type=group_owner.type,
+ method="scm_integration",
)
except UnableToAcquireLock:
pass
diff --git a/src/sentry/tasks/delete_seer_grouping_records.py b/src/sentry/tasks/delete_seer_grouping_records.py
index 4deaec4d3525a5..acbe9a4dd4e068 100644
--- a/src/sentry/tasks/delete_seer_grouping_records.py
+++ b/src/sentry/tasks/delete_seer_grouping_records.py
@@ -8,7 +8,7 @@
delete_grouping_records_by_hash,
delete_project_grouping_records,
)
-from sentry.seer.similarity.utils import killswitch_enabled
+from sentry.seer.similarity.utils import ReferrerOptions, killswitch_enabled
from sentry.silo.base import SiloMode
from sentry.tasks.base import instrumented_task
@@ -34,7 +34,7 @@ def delete_seer_grouping_records_by_hash(
Task to delete seer grouping records by hash list.
Calls the seer delete by hash endpoint with batches of hashes of size `BATCH_SIZE`.
"""
- if killswitch_enabled(project_id) or options.get(
+ if killswitch_enabled(project_id, ReferrerOptions.DELETION) or options.get(
"seer.similarity-embeddings-delete-by-hash-killswitch.enabled"
):
return
@@ -57,7 +57,7 @@ def call_delete_seer_grouping_records_by_hash(
if (
project
and project.get_option("sentry:similarity_backfill_completed")
- and not killswitch_enabled(project.id)
+ and not killswitch_enabled(project.id, ReferrerOptions.DELETION)
and not options.get("seer.similarity-embeddings-delete-by-hash-killswitch.enabled")
):
# TODO (jangjodi): once we store seer grouping info in GroupHash, we should filter by that here
@@ -86,7 +86,7 @@ def call_seer_delete_project_grouping_records(
*args: Any,
**kwargs: Any,
) -> None:
- if killswitch_enabled(project_id) or options.get(
+ if killswitch_enabled(project_id, ReferrerOptions.DELETION) or options.get(
"seer.similarity-embeddings-delete-by-hash-killswitch.enabled"
):
return
diff --git a/src/sentry/tasks/embeddings_grouping/backfill_seer_grouping_records_for_project.py b/src/sentry/tasks/embeddings_grouping/backfill_seer_grouping_records_for_project.py
index f22311cb035081..ea090ab018d507 100644
--- a/src/sentry/tasks/embeddings_grouping/backfill_seer_grouping_records_for_project.py
+++ b/src/sentry/tasks/embeddings_grouping/backfill_seer_grouping_records_for_project.py
@@ -9,11 +9,15 @@
from sentry.grouping.api import GroupingConfigNotFound
from sentry.grouping.enhancer.exceptions import InvalidEnhancerConfig
from sentry.models.project import Project
-from sentry.seer.similarity.utils import killswitch_enabled, project_is_seer_eligible
+from sentry.seer.similarity.utils import (
+ ReferrerOptions,
+ killswitch_enabled,
+ project_is_seer_eligible,
+)
from sentry.silo.base import SiloMode
from sentry.tasks.base import instrumented_task
from sentry.tasks.embeddings_grouping.utils import (
- FeatureError,
+ NODESTORE_RETRY_EXCEPTIONS,
GroupStacktraceData,
create_project_cohort,
delete_seer_grouping_records,
@@ -80,7 +84,7 @@ def backfill_seer_grouping_records_for_project(
assert current_project_id is not None
if options.get("seer.similarity-backfill-killswitch.enabled") or killswitch_enabled(
- current_project_id
+ current_project_id, ReferrerOptions.BACKFILL
):
logger.info("backfill_seer_grouping_records.killswitch_enabled")
return
@@ -109,13 +113,6 @@ def backfill_seer_grouping_records_for_project(
last_processed_group_id_input,
last_processed_project_index_input,
)
- except FeatureError:
- logger.info(
- "backfill_seer_grouping_records.no_feature",
- extra={"current_project_id": current_project_id},
- )
- # TODO: let's just delete this branch since feature is on
- return
except Project.DoesNotExist:
logger.info(
"backfill_seer_grouping_records.project_does_not_exist",
@@ -226,6 +223,16 @@ def backfill_seer_grouping_records_for_project(
except EVENT_INFO_EXCEPTIONS:
metrics.incr("sentry.tasks.backfill_seer_grouping_records.grouping_config_error")
nodestore_results, group_hashes_dict = GroupStacktraceData(data=[], stacktrace_list=[]), {}
+ except NODESTORE_RETRY_EXCEPTIONS as e:
+ extra = {
+ "organization_id": project.organization.id,
+ "project_id": project.id,
+ "error": e.message,
+ }
+ logger.exception(
+ "tasks.backfill_seer_grouping_records.bulk_event_lookup_exception", extra=extra
+ )
+ group_hashes_dict = {}
if not group_hashes_dict:
call_next_backfill(
@@ -305,7 +312,7 @@ def call_next_backfill(
only_delete: bool = False,
last_processed_group_id: int | None = None,
last_processed_project_id: int | None = None,
-):
+) -> None:
if last_processed_group_id is not None:
backfill_seer_grouping_records_for_project.apply_async(
args=[
diff --git a/src/sentry/tasks/embeddings_grouping/utils.py b/src/sentry/tasks/embeddings_grouping/utils.py
index b457ce50322c7e..84f121ff440ec2 100644
--- a/src/sentry/tasks/embeddings_grouping/utils.py
+++ b/src/sentry/tasks/embeddings_grouping/utils.py
@@ -10,12 +10,11 @@
from google.api_core.exceptions import DeadlineExceeded, ServiceUnavailable
from snuba_sdk import Column, Condition, Entity, Limit, Op, Query, Request
-from sentry import features, nodestore, options
+from sentry import nodestore, options
from sentry.conf.server import SEER_SIMILARITY_MODEL_VERSION
from sentry.eventstore.models import Event
-from sentry.grouping.grouping_info import get_grouping_info
+from sentry.grouping.grouping_info import get_grouping_info_from_variants
from sentry.issues.grouptype import ErrorGroupType
-from sentry.issues.occurrence_consumer import EventLookupError
from sentry.models.group import Group, GroupStatus
from sentry.models.project import Project
from sentry.seer.similarity.grouping_records import (
@@ -36,6 +35,7 @@
event_content_has_stacktrace,
filter_null_from_string,
get_stacktrace_string_with_metrics,
+ has_too_many_contributing_frames,
)
from sentry.snuba.dataset import Dataset
from sentry.snuba.referrer import Referrer
@@ -57,10 +57,6 @@
logger = logging.getLogger(__name__)
-class FeatureError(Exception):
- pass
-
-
class GroupEventRow(TypedDict):
event_id: str
group_id: int
@@ -142,8 +138,6 @@ def initialize_backfill(
},
)
project = Project.objects.get_from_cache(id=project_id)
- if not features.has("projects:similarity-embeddings-backfill", project):
- raise FeatureError("Project does not have feature")
last_processed_project_index_ret = (
last_processed_project_index if last_processed_project_index else 0
@@ -363,11 +357,17 @@ def get_events_from_nodestore(
bulk_event_ids = set()
for group_id, event in nodestore_events.items():
event._project_cache = project
- if event and event.data and event_content_has_stacktrace(event):
- grouping_info = get_grouping_info(None, project=project, event=event)
- stacktrace_string = get_stacktrace_string_with_metrics(
- grouping_info, event.platform, ReferrerOptions.BACKFILL
- )
+ stacktrace_string = None
+
+ if event and event_content_has_stacktrace(event):
+ variants = event.get_grouping_variants(normalize_stacktraces=True)
+
+ if not has_too_many_contributing_frames(event, variants, ReferrerOptions.BACKFILL):
+ grouping_info = get_grouping_info_from_variants(variants)
+ stacktrace_string = get_stacktrace_string_with_metrics(
+ grouping_info, event.platform, ReferrerOptions.BACKFILL
+ )
+
if not stacktrace_string:
invalid_event_group_ids.append(group_id)
continue
@@ -571,26 +571,13 @@ def update_groups(project, seer_response, group_id_batch_filtered, group_hashes_
def _make_nodestore_call(project, node_keys):
- try:
- bulk_data = _retry_operation(
- nodestore.backend.get_multi,
- node_keys,
- retries=3,
- delay=2,
- exceptions=NODESTORE_RETRY_EXCEPTIONS,
- )
- except NODESTORE_RETRY_EXCEPTIONS as e:
- extra = {
- "organization_id": project.organization.id,
- "project_id": project.id,
- "node_keys": json.dumps(node_keys),
- "error": e.message,
- }
- logger.exception(
- "tasks.backfill_seer_grouping_records.bulk_event_lookup_exception",
- extra=extra,
- )
- raise
+ bulk_data = _retry_operation(
+ nodestore.backend.get_multi,
+ node_keys,
+ retries=3,
+ delay=2,
+ exceptions=NODESTORE_RETRY_EXCEPTIONS,
+ )
return bulk_data
@@ -701,16 +688,6 @@ def _retry_operation(operation, *args, retries, delay, exceptions, **kwargs):
raise
-# TODO: delete this and its tests
-def lookup_event(project_id: int, event_id: str, group_id: int) -> Event:
- data = nodestore.backend.get(Event.generate_node_id(project_id, event_id))
- if data is None:
- raise EventLookupError(f"Failed to lookup event({event_id}) for project_id({project_id})")
- event = Event(event_id=event_id, project_id=project_id, group_id=group_id)
- event.data = data
- return event
-
-
def delete_seer_grouping_records(
project_id: int,
):
diff --git a/src/sentry/tasks/groupowner.py b/src/sentry/tasks/groupowner.py
index e5371142994479..81c7a1b0816f99 100644
--- a/src/sentry/tasks/groupowner.py
+++ b/src/sentry/tasks/groupowner.py
@@ -4,6 +4,7 @@
from django.utils import timezone
+from sentry import analytics
from sentry.locks import locks
from sentry.models.commit import Commit
from sentry.models.groupowner import GroupOwner, GroupOwnerType
@@ -103,6 +104,17 @@ def _process_suspect_commits(
"project": project_id,
},
)
+ analytics.record(
+ "groupowner.assignment",
+ organization_id=project.organization_id,
+ project_id=project.id,
+ group_id=group_id,
+ new_assignment=created,
+ user_id=go.user_id,
+ group_owner_type=go.type,
+ method="release_commit",
+ )
+
except GroupOwner.MultipleObjectsReturned:
GroupOwner.objects.filter(
group_id=group_id,
diff --git a/src/sentry/tasks/on_demand_metrics.py b/src/sentry/tasks/on_demand_metrics.py
index 2b38c0956ff1cb..25e095405880c1 100644
--- a/src/sentry/tasks/on_demand_metrics.py
+++ b/src/sentry/tasks/on_demand_metrics.py
@@ -61,10 +61,6 @@ def _set_currently_processing_batch(current_batch: int) -> None:
cache.set(_get_widget_processing_batch_key(), current_batch, timeout=3600)
-def _set_cardinality_cache(cache_key: str, is_low_cardinality: bool) -> None:
- cache.set(cache_key, is_low_cardinality, timeout=_WIDGET_QUERY_CARDINALITY_TTL)
-
-
def _get_previous_processing_batch() -> int:
return cache.get(_get_widget_processing_batch_key(), 0)
diff --git a/src/sentry/tasks/post_process.py b/src/sentry/tasks/post_process.py
index 35bbc5e8a21fb8..9cda6d27f367a1 100644
--- a/src/sentry/tasks/post_process.py
+++ b/src/sentry/tasks/post_process.py
@@ -40,6 +40,7 @@
from sentry.utils.sdk import bind_organization_context, set_current_event_project
from sentry.utils.sdk_crashes.sdk_crash_detection_config import build_sdk_crash_detection_configs
from sentry.utils.services import build_instance_from_options_of_type
+from sentry.workflow_engine.types import WorkflowJob
if TYPE_CHECKING:
from sentry.eventstore.models import Event, GroupEvent
@@ -679,35 +680,30 @@ def get_event_raise_exception() -> Event:
)
metric_tags["occurrence_type"] = group_event.group.issue_type.slug
- if not is_reprocessed and event.data.get("received"):
- duration = time() - event.data["received"]
- metrics.timing(
- "events.time-to-post-process",
- duration,
- instance=event.data["platform"],
- tags=metric_tags,
- )
-
- # We see occasional metrics being recorded with very old data,
- # temporarily log some information about these groups to help
- # investigate.
- if duration and duration > 432_000: # 5 days (5*24*60*60)
- logger.warning(
- "tasks.post_process.old_time_to_post_process",
- extra={
- "group_id": group_id,
- "project_id": project_id,
- "duration": duration,
- "received": event.data["received"],
- "platform": event.data["platform"],
- "reprocessing": json.dumps(
- get_path(event.data, "contexts", "reprocessing")
- ),
- "original_issue_id": json.dumps(
- get_path(event.data, "contexts", "reprocessing", "original_issue_id")
- ),
- },
+ if not is_reprocessed:
+ received_at = event.data.get("received")
+ saved_at = event.data.get("nodestore_insert")
+ post_processed_at = time()
+
+ if saved_at:
+ metrics.timing(
+ "events.saved_to_post_processed",
+ post_processed_at - saved_at,
+ instance=event.data["platform"],
+ tags=metric_tags,
)
+ else:
+ metrics.incr("events.missing_nodestore_insert", tags=metric_tags)
+
+ if received_at:
+ metrics.timing(
+ "events.time-to-post-process",
+ post_processed_at - received_at,
+ instance=event.data["platform"],
+ tags=metric_tags,
+ )
+ else:
+ metrics.incr("events.missing_received", tags=metric_tags)
def run_post_process_job(job: PostProcessJob) -> None:
@@ -720,12 +716,12 @@ def run_post_process_job(job: PostProcessJob) -> None:
):
return
- if issue_category not in GROUP_CATEGORY_POST_PROCESS_PIPELINE:
- # pipeline for generic issues
- pipeline = GENERIC_POST_PROCESS_PIPELINE
- else:
+ if issue_category in GROUP_CATEGORY_POST_PROCESS_PIPELINE:
# specific pipelines for issue types
pipeline = GROUP_CATEGORY_POST_PROCESS_PIPELINE[issue_category]
+ else:
+ # pipeline for generic issues
+ pipeline = GENERIC_POST_PROCESS_PIPELINE
for pipeline_step in pipeline:
try:
@@ -994,6 +990,29 @@ def _get_replay_id(event):
)
+def process_workflow_engine(job: PostProcessJob) -> None:
+ if job["is_reprocessed"]:
+ return
+
+ # TODO - Add a rollout flag check here, if it's not enabled, call process_rules
+ # If the flag is enabled, use the code below
+ from sentry.workflow_engine.processors.workflow import process_workflows
+
+ # PostProcessJob event is optional, WorkflowJob event is required
+ if "event" not in job:
+ logger.error("Missing event to create WorkflowJob", extra={"job": job})
+ return
+
+ try:
+ workflow_job = WorkflowJob({**job}) # type: ignore[typeddict-item]
+ except Exception:
+ logger.exception("Could not create WorkflowJob", extra={"job": job})
+ return
+
+ with sentry_sdk.start_span(op="tasks.post_process_group.workflow_engine.process_workflow"):
+ process_workflows(workflow_job)
+
+
def process_rules(job: PostProcessJob) -> None:
if job["is_reprocessed"]:
return
@@ -1558,6 +1577,9 @@ def detect_base_urls_for_uptime(job: PostProcessJob):
feedback_filter_decorator(process_inbox_adds),
feedback_filter_decorator(process_rules),
],
+ GroupCategory.METRIC_ALERT: [
+ process_workflow_engine,
+ ],
}
GENERIC_POST_PROCESS_PIPELINE = [
diff --git a/src/sentry/tasks/statistical_detectors.py b/src/sentry/tasks/statistical_detectors.py
index acc63ab4a7a6d6..8a1f1259ab94e4 100644
--- a/src/sentry/tasks/statistical_detectors.py
+++ b/src/sentry/tasks/statistical_detectors.py
@@ -236,6 +236,10 @@ class EndpointRegressionDetector(RegressionDetector):
resolution_rel_threshold = 0.1
escalation_rel_threshold = 0.75
+ @classmethod
+ def min_throughput_threshold(cls) -> int:
+ return options.get("statistical_detectors.throughput.threshold.transactions")
+
@classmethod
def detector_algorithm_factory(cls) -> DetectorAlgorithm:
return MovingAverageRelativeChangeDetector(
@@ -278,6 +282,10 @@ class FunctionRegressionDetector(RegressionDetector):
resolution_rel_threshold = 0.1
escalation_rel_threshold = 0.75
+ @classmethod
+ def min_throughput_threshold(cls) -> int:
+ return options.get("statistical_detectors.throughput.threshold.functions")
+
@classmethod
def detector_algorithm_factory(cls) -> DetectorAlgorithm:
return MovingAverageRelativeChangeDetector(
diff --git a/src/sentry/taskworker/client.py b/src/sentry/taskworker/client.py
index bb1549d4d3aa4a..6beb6886b30e51 100644
--- a/src/sentry/taskworker/client.py
+++ b/src/sentry/taskworker/client.py
@@ -2,6 +2,7 @@
import grpc
from sentry_protos.sentry.v1.taskworker_pb2 import (
+ FetchNextTask,
GetTaskRequest,
SetTaskStatusRequest,
TaskActivation,
@@ -24,13 +25,14 @@ def __init__(self, host: str) -> None:
self._channel = grpc.insecure_channel(self._host)
self._stub = ConsumerServiceStub(self._channel)
- def get_task(self) -> TaskActivation | None:
+ def get_task(self, namespace: str | None = None) -> TaskActivation | None:
"""
- Fetch a pending task
+ Fetch a pending task.
- Will return None when there are no tasks to fetch
+ If a namespace is provided, only tasks for that namespace will be fetched.
+ This will return None if there are no tasks to fetch.
"""
- request = GetTaskRequest()
+ request = GetTaskRequest(namespace=namespace)
try:
response = self._stub.GetTask(request)
except grpc.RpcError as err:
@@ -42,7 +44,7 @@ def get_task(self) -> TaskActivation | None:
return None
def update_task(
- self, task_id: str, status: TaskActivationStatus.ValueType, fetch_next: bool = True
+ self, task_id: str, status: TaskActivationStatus.ValueType, fetch_next_task: FetchNextTask
) -> TaskActivation | None:
"""
Update the status for a given task activation.
@@ -52,7 +54,7 @@ def update_task(
request = SetTaskStatusRequest(
id=task_id,
status=status,
- fetch_next=fetch_next,
+ fetch_next_task=fetch_next_task,
)
try:
response = self._stub.SetTaskStatus(request)
diff --git a/src/sentry/taskworker/registry.py b/src/sentry/taskworker/registry.py
index 33423dbf2f3150..90ab9d4e8b55e4 100644
--- a/src/sentry/taskworker/registry.py
+++ b/src/sentry/taskworker/registry.py
@@ -57,11 +57,19 @@ def producer(self) -> KafkaProducer:
return self._producer
def get(self, name: str) -> Task[Any, Any]:
+ """
+ Get a registered task by name
+
+ Raises KeyError when an unknown task is provided.
+ """
if name not in self._registered_tasks:
raise KeyError(f"No task registered with the name {name}. Check your imports")
return self._registered_tasks[name]
def contains(self, name: str) -> bool:
+ """
+ Check if a task name has been registered
+ """
return name in self._registered_tasks
def register(
@@ -80,6 +88,7 @@ def register(
asynchronously via taskworkers.
Parameters
+ ----------
name: str
The name of the task. This is serialized and must be stable across deploys.
diff --git a/src/sentry/taskworker/task.py b/src/sentry/taskworker/task.py
index 9b7525b0205046..920bcd727fc148 100644
--- a/src/sentry/taskworker/task.py
+++ b/src/sentry/taskworker/task.py
@@ -7,6 +7,7 @@
from uuid import uuid4
import orjson
+import sentry_sdk
from django.conf import settings
from django.utils import timezone
from google.protobuf.timestamp_pb2 import Timestamp
@@ -58,15 +59,31 @@ def retry(self) -> Retry | None:
return self._retry
def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R:
+ """
+ Call the task function immediately.
+ """
return self._func(*args, **kwargs)
def delay(self, *args: P.args, **kwargs: P.kwargs) -> None:
+ """
+ Schedule a task to run later with a set of arguments.
+
+ The provided parameters will be JSON encoded and stored within
+ a `TaskActivation` protobuf that is appended to kafka
+ """
self.apply_async(*args, **kwargs)
def apply_async(self, *args: P.args, **kwargs: P.kwargs) -> None:
+ """
+ Schedule a task to run later with a set of arguments.
+
+ The provided parameters will be JSON encoded and stored within
+ a `TaskActivation` protobuf that is appended to kafka
+ """
if settings.TASK_WORKER_ALWAYS_EAGER:
self._func(*args, **kwargs)
else:
+ # TODO(taskworker) promote parameters to headers
self._namespace.send_task(self.create_activation(*args, **kwargs))
def create_activation(self, *args: P.args, **kwargs: P.kwargs) -> TaskActivation:
@@ -81,10 +98,16 @@ def create_activation(self, *args: P.args, **kwargs: P.kwargs) -> TaskActivation
if isinstance(expires, datetime.timedelta):
expires = int(expires.total_seconds())
+ headers = {
+ "sentry-trace": sentry_sdk.get_traceparent() or "",
+ "baggage": sentry_sdk.get_baggage() or "",
+ }
+
return TaskActivation(
id=uuid4().hex,
namespace=self._namespace.name,
taskname=self.name,
+ headers=headers,
parameters=orjson.dumps({"args": args, "kwargs": kwargs}).decode("utf8"),
retry_state=self._create_retry_state(),
received_at=received_at,
diff --git a/src/sentry/taskworker/tasks/examples.py b/src/sentry/taskworker/tasks/examples.py
index 2e077d70e9b88b..ad623fbe1b706a 100644
--- a/src/sentry/taskworker/tasks/examples.py
+++ b/src/sentry/taskworker/tasks/examples.py
@@ -3,11 +3,54 @@
import logging
from sentry.taskworker.registry import taskregistry
+from sentry.taskworker.retry import LastAction, Retry, RetryError
logger = logging.getLogger(__name__)
+
exampletasks = taskregistry.create_namespace(name="examples")
@exampletasks.register(name="examples.say_hello")
def say_hello(name: str) -> None:
- print(f"Hello {name}") # noqa
+ logger.info("Hello %s", name)
+
+
+@exampletasks.register(
+ name="examples.retry_deadletter", retry=Retry(times=2, times_exceeded=LastAction.Deadletter)
+)
+def retry_deadletter() -> None:
+ raise RetryError
+
+
+@exampletasks.register(
+ name="examples.will_retry",
+ retry=Retry(times=3, on=(RuntimeError,), times_exceeded=LastAction.Discard),
+)
+def will_retry(failure: str) -> None:
+ if failure == "retry":
+ logger.info("going to retry with explicit retry error")
+ raise RetryError
+ if failure == "raise":
+ logger.info("raising runtimeerror")
+ raise RuntimeError("oh no")
+ logger.info("got %s", failure)
+
+
+@exampletasks.register(name="examples.simple_task")
+def simple_task() -> None:
+ logger.info("simple_task complete")
+
+
+@exampletasks.register(name="examples.retry_task", retry=Retry(times=2))
+def retry_task() -> None:
+ raise RetryError
+
+
+@exampletasks.register(name="examples.fail_task")
+def fail_task() -> None:
+ raise ValueError("nope")
+
+
+@exampletasks.register(name="examples.at_most_once", at_most_once=True)
+def at_most_once_task() -> None:
+ pass
diff --git a/src/sentry/taskworker/worker.py b/src/sentry/taskworker/worker.py
index bef131a61298de..5153dafe6de5b2 100644
--- a/src/sentry/taskworker/worker.py
+++ b/src/sentry/taskworker/worker.py
@@ -10,12 +10,14 @@
import grpc
import orjson
+import sentry_sdk
from django.conf import settings
from django.core.cache import cache
from sentry_protos.sentry.v1.taskworker_pb2 import (
TASK_ACTIVATION_STATUS_COMPLETE,
TASK_ACTIVATION_STATUS_FAILURE,
TASK_ACTIVATION_STATUS_RETRY,
+ FetchNextTask,
TaskActivation,
)
@@ -23,18 +25,36 @@
from sentry.taskworker.registry import taskregistry
from sentry.taskworker.task import Task
from sentry.utils import metrics
+from sentry.utils.memory import track_memory_usage
logger = logging.getLogger("sentry.taskworker.worker")
-# Use forking processes so that django is initialized
-mp_context = multiprocessing.get_context("fork")
+mp_context = multiprocessing.get_context("spawn")
-def _process_activation(
- namespace: str, task_name: str, args: list[Any], kwargs: dict[str, Any]
-) -> None:
+def _init_pool_process() -> None:
+ """initialize pool workers by loading all task modules"""
+ for module in settings.TASKWORKER_IMPORTS:
+ __import__(module)
+
+
+def _process_activation(activation: TaskActivation) -> None:
"""multiprocess worker method"""
- taskregistry.get(namespace).get(task_name)(*args, **kwargs)
+ parameters = orjson.loads(activation.parameters)
+ args = parameters.get("args", [])
+ kwargs = parameters.get("kwargs", {})
+ headers = {k: v for k, v in activation.headers.items()}
+
+ transaction = sentry_sdk.continue_trace(
+ environ_or_headers=headers,
+ op="task.taskworker",
+ name=f"{activation.namespace}:{activation.taskname}",
+ )
+ with (
+ track_memory_usage("taskworker.worker.memory_change"),
+ sentry_sdk.start_transaction(transaction),
+ ):
+ taskregistry.get(activation.namespace).get(activation.taskname)(*args, **kwargs)
AT_MOST_ONCE_TIMEOUT = 60 * 60 * 24 # 1 day
@@ -57,12 +77,17 @@ class TaskWorker:
"""
def __init__(
- self, rpc_host: str, max_task_count: int | None = None, **options: dict[str, Any]
+ self,
+ rpc_host: str,
+ max_task_count: int | None = None,
+ namespace: str | None = None,
+ **options: dict[str, Any],
) -> None:
self.options = options
self._execution_count = 0
self._worker_id = uuid4().hex
self._max_task_count = max_task_count
+ self._namespace = namespace
self.client = TaskworkerClient(rpc_host)
self._pool: Pool | None = None
self._build_pool()
@@ -74,7 +99,7 @@ def __del__(self) -> None:
def _build_pool(self) -> None:
if self._pool:
self._pool.terminate()
- self._pool = mp_context.Pool(processes=1)
+ self._pool = mp_context.Pool(processes=1, initializer=_init_pool_process)
def do_imports(self) -> None:
for module in settings.TASKWORKER_IMPORTS:
@@ -124,7 +149,7 @@ def start(self) -> int:
def fetch_task(self) -> TaskActivation | None:
try:
- activation = self.client.get_task()
+ activation = self.client.get_task(self._namespace)
except grpc.RpcError:
metrics.incr("taskworker.worker.get_task.failed")
logger.info("get_task failed. Retrying in 1 second")
@@ -132,7 +157,7 @@ def fetch_task(self) -> TaskActivation | None:
if not activation:
metrics.incr("taskworker.worker.get_task.not_found")
- logger.info("No task fetched")
+ logger.debug("No task fetched")
return None
metrics.incr("taskworker.worker.get_task.success")
@@ -167,6 +192,7 @@ def process_task(self, activation: TaskActivation) -> TaskActivation | None:
return self.client.update_task(
task_id=activation.id,
status=TASK_ACTIVATION_STATUS_FAILURE,
+ fetch_next_task=FetchNextTask(namespace=self._namespace),
)
if task.at_most_once:
@@ -187,17 +213,11 @@ def process_task(self, activation: TaskActivation) -> TaskActivation | None:
result = None
execution_start_time = 0.0
try:
- task_data_parameters = orjson.loads(activation.parameters)
execution_start_time = time.time()
result = self._pool.apply_async(
func=_process_activation,
- args=(
- activation.namespace,
- activation.taskname,
- task_data_parameters["args"],
- task_data_parameters["kwargs"],
- ),
+ args=(activation,),
)
# Will trigger a TimeoutError if the task execution runs long
result.get(timeout=processing_timeout)
@@ -260,4 +280,5 @@ def process_task(self, activation: TaskActivation) -> TaskActivation | None:
return self.client.update_task(
task_id=activation.id,
status=next_state,
+ fetch_next_task=FetchNextTask(namespace=self._namespace),
)
diff --git a/static/app/views/performance/settings.ts b/src/sentry/tempest/__init__.py
similarity index 100%
rename from static/app/views/performance/settings.ts
rename to src/sentry/tempest/__init__.py
diff --git a/src/sentry/tempest/apps.py b/src/sentry/tempest/apps.py
new file mode 100644
index 00000000000000..9dbe005be9753f
--- /dev/null
+++ b/src/sentry/tempest/apps.py
@@ -0,0 +1,5 @@
+from django.apps import AppConfig
+
+
+class TempestConfig(AppConfig):
+ name = "sentry.tempest"
diff --git a/src/sentry/tempest/endpoints/tempest_credentials.py b/src/sentry/tempest/endpoints/tempest_credentials.py
new file mode 100644
index 00000000000000..34c05de20e5665
--- /dev/null
+++ b/src/sentry/tempest/endpoints/tempest_credentials.py
@@ -0,0 +1,58 @@
+from django.db import IntegrityError
+from rest_framework.exceptions import NotFound
+from rest_framework.request import Request
+from rest_framework.response import Response
+
+from sentry import features
+from sentry.api.api_owners import ApiOwner
+from sentry.api.api_publish_status import ApiPublishStatus
+from sentry.api.base import region_silo_endpoint
+from sentry.api.bases import ProjectEndpoint
+from sentry.api.paginator import OffsetPaginator
+from sentry.api.serializers.base import serialize
+from sentry.models.project import Project
+from sentry.tempest.models import TempestCredentials
+from sentry.tempest.permissions import TempestCredentialsPermission
+from sentry.tempest.serializers import DRFTempestCredentialsSerializer, TempestCredentialsSerializer
+
+
+@region_silo_endpoint
+class TempestCredentialsEndpoint(ProjectEndpoint):
+ publish_status = {
+ "GET": ApiPublishStatus.PRIVATE,
+ "POST": ApiPublishStatus.PRIVATE,
+ }
+ owner = ApiOwner.GDX
+
+ permission_classes = (TempestCredentialsPermission,)
+
+ def has_feature(self, request: Request, project: Project) -> bool:
+ return features.has(
+ "organizations:tempest-access", project.organization, actor=request.user
+ )
+
+ def get(self, request: Request, project: Project) -> Response:
+ if not self.has_feature(request, project):
+ raise NotFound
+
+ tempest_credentials_qs = TempestCredentials.objects.filter(project=project)
+ return self.paginate(
+ request=request,
+ queryset=tempest_credentials_qs,
+ on_results=lambda x: serialize(x, request.user, TempestCredentialsSerializer()),
+ paginator_cls=OffsetPaginator,
+ )
+
+ def post(self, request: Request, project: Project) -> Response:
+ if not self.has_feature(request, project):
+ raise NotFound
+
+ serializer = DRFTempestCredentialsSerializer(data=request.data)
+ serializer.is_valid(raise_exception=True)
+ try:
+ serializer.save(created_by_id=request.user.id, project=project)
+ except IntegrityError:
+ return Response(
+ {"detail": "A credential with this client ID already exists."}, status=400
+ )
+ return Response(serializer.data, status=201)
diff --git a/src/sentry/tempest/endpoints/tempest_credentials_details.py b/src/sentry/tempest/endpoints/tempest_credentials_details.py
new file mode 100644
index 00000000000000..abebd916ae8071
--- /dev/null
+++ b/src/sentry/tempest/endpoints/tempest_credentials_details.py
@@ -0,0 +1,34 @@
+from rest_framework.exceptions import NotFound
+from rest_framework.request import Request
+from rest_framework.response import Response
+
+from sentry import features
+from sentry.api.api_owners import ApiOwner
+from sentry.api.api_publish_status import ApiPublishStatus
+from sentry.api.base import region_silo_endpoint
+from sentry.api.bases import ProjectEndpoint
+from sentry.models.project import Project
+from sentry.tempest.models import TempestCredentials
+from sentry.tempest.permissions import TempestCredentialsPermission
+
+
+@region_silo_endpoint
+class TempestCredentialsDetailsEndpoint(ProjectEndpoint):
+ publish_status = {
+ "DELETE": ApiPublishStatus.PRIVATE,
+ }
+ owner = ApiOwner.GDX
+
+ permission_classes = (TempestCredentialsPermission,)
+
+ def has_feature(self, request: Request, project: Project) -> bool:
+ return features.has(
+ "organizations:tempest-access", project.organization, actor=request.user
+ )
+
+ def delete(self, request: Request, project: Project, tempest_credentials_id: int) -> Response:
+ if not self.has_feature(request, project):
+ raise NotFound
+
+ TempestCredentials.objects.filter(project=project, id=tempest_credentials_id).delete()
+ return Response(status=204)
diff --git a/src/sentry/tempest/migrations/0001_create_tempest_credentials_model.py b/src/sentry/tempest/migrations/0001_create_tempest_credentials_model.py
new file mode 100644
index 00000000000000..e528576fa44d1c
--- /dev/null
+++ b/src/sentry/tempest/migrations/0001_create_tempest_credentials_model.py
@@ -0,0 +1,72 @@
+# Generated by Django 5.1.4 on 2024-12-17 00:21
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+import sentry.db.models.fields.bounded
+import sentry.db.models.fields.foreignkey
+import sentry.db.models.fields.hybrid_cloud_foreign_key
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ initial = True
+
+ dependencies = [
+ ("sentry", "0802_remove_grouping_auto_update_option"),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name="TempestCredentials",
+ fields=[
+ (
+ "id",
+ sentry.db.models.fields.bounded.BoundedBigAutoField(
+ primary_key=True, serialize=False
+ ),
+ ),
+ ("date_updated", models.DateTimeField(auto_now=True)),
+ ("date_added", models.DateTimeField(auto_now_add=True)),
+ (
+ "created_by_id",
+ sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey(
+ "sentry.User", db_index=True, null=True, on_delete="SET_NULL"
+ ),
+ ),
+ ("message", models.TextField()),
+ ("message_type", models.CharField(default="error", max_length=20)),
+ ("client_id", models.CharField()),
+ ("client_secret", models.CharField()),
+ ("latest_fetched_item_id", models.CharField(null=True)),
+ (
+ "project",
+ sentry.db.models.fields.foreignkey.FlexibleForeignKey(
+ on_delete=django.db.models.deletion.CASCADE, to="sentry.project"
+ ),
+ ),
+ ],
+ options={
+ "constraints": [
+ models.UniqueConstraint(
+ fields=("client_id", "project"),
+ name="sentry_tempestcredentials_client_project_uniq",
+ )
+ ],
+ },
+ ),
+ ]
diff --git a/tests/sentry/hybridcloud/test_organization_provisioning.py b/src/sentry/tempest/migrations/__init__.py
similarity index 100%
rename from tests/sentry/hybridcloud/test_organization_provisioning.py
rename to src/sentry/tempest/migrations/__init__.py
diff --git a/src/sentry/tempest/models.py b/src/sentry/tempest/models.py
new file mode 100644
index 00000000000000..63e019ea5a59e7
--- /dev/null
+++ b/src/sentry/tempest/models.py
@@ -0,0 +1,44 @@
+from django.conf import settings
+from django.db import models
+from django.db.models import UniqueConstraint
+
+from sentry.backup.scopes import RelocationScope
+from sentry.db.models import DefaultFieldsModel, FlexibleForeignKey, region_silo_model
+from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey
+
+
+class MessageType(models.TextChoices):
+ ERROR = "error"
+ WARNING = "warning"
+ SUCCESS = "success"
+ INFO = "info"
+
+
+@region_silo_model
+class TempestCredentials(DefaultFieldsModel):
+ # Contains sensitive information which we don't want to export/import - it should be configured again manually
+ __relocation_scope__ = RelocationScope.Excluded
+
+ created_by_id = HybridCloudForeignKey(settings.AUTH_USER_MODEL, null=True, on_delete="SET_NULL")
+ project = FlexibleForeignKey("sentry.Project", on_delete=models.CASCADE)
+
+ # message that is shown next to the client id/secret pair
+ # used to communicate the status of the latest actions with credentials
+ message = models.TextField()
+ message_type = models.CharField(
+ max_length=20, choices=MessageType.choices, default=MessageType.ERROR
+ )
+
+ client_id = models.CharField()
+ client_secret = models.CharField()
+
+ # id of the latest item fetched via tempest
+ latest_fetched_item_id = models.CharField(null=True)
+
+ class Meta:
+ constraints = [
+ UniqueConstraint(
+ fields=["client_id", "project"],
+ name="sentry_tempestcredentials_client_project_uniq",
+ )
+ ]
diff --git a/src/sentry/tempest/permissions.py b/src/sentry/tempest/permissions.py
new file mode 100644
index 00000000000000..9e64eee7f0628e
--- /dev/null
+++ b/src/sentry/tempest/permissions.py
@@ -0,0 +1,16 @@
+from sentry.api.bases.project import ProjectPermission
+
+
+class TempestCredentialsPermission(ProjectPermission):
+ scope_map = {
+ "GET": [
+ "project:read",
+ "project:write",
+ "project:admin",
+ "org:read",
+ "org:write",
+ "org:admin",
+ ],
+ "POST": ["org:admin"],
+ "DELETE": ["org:admin"],
+ }
diff --git a/src/sentry/tempest/serializers.py b/src/sentry/tempest/serializers.py
new file mode 100644
index 00000000000000..625e636be2de86
--- /dev/null
+++ b/src/sentry/tempest/serializers.py
@@ -0,0 +1,69 @@
+from rest_framework import serializers
+
+from sentry.api.serializers.base import Serializer, register
+from sentry.tempest.models import TempestCredentials
+from sentry.users.services.user.service import user_service
+
+
+@register(TempestCredentials)
+class TempestCredentialsSerializer(Serializer):
+ def _obfuscate_client_secret(self, client_secret: str) -> str:
+ return "*" * len(client_secret)
+
+ def get_attrs(
+ self,
+ item_list,
+ user,
+ **kwargs,
+ ):
+ users_mapping = {}
+ user_ids = [item.created_by_id for item in item_list if item.created_by_id is not None]
+ users = user_service.get_many_by_id(ids=user_ids)
+ for rpc_user in users:
+ users_mapping[rpc_user.id] = rpc_user
+
+ attrs = {}
+ for item in item_list:
+ attrs[item] = users_mapping.get(item.created_by_id)
+
+ return attrs
+
+ def serialize(self, obj, attrs, user, **kwargs):
+ rpc_user = attrs
+ return {
+ "id": obj.id,
+ "clientId": obj.client_id,
+ "clientSecret": self._obfuscate_client_secret(obj.client_secret),
+ "message": obj.message,
+ "messageType": obj.message_type,
+ "latestFetchedItemId": obj.latest_fetched_item_id,
+ "createdById": obj.created_by_id,
+ "createdByEmail": rpc_user.email if rpc_user else None,
+ "dateAdded": obj.date_added,
+ "dateUpdated": obj.date_updated,
+ }
+
+
+class DRFTempestCredentialsSerializer(serializers.ModelSerializer):
+ clientId = serializers.CharField(source="client_id")
+ clientSecret = serializers.CharField(source="client_secret")
+ message = serializers.CharField(read_only=True)
+ messageType = serializers.CharField(source="message_type", read_only=True)
+ latestFetchedItemId = serializers.CharField(source="latest_fetched_item_id", read_only=True)
+ createdById = serializers.CharField(source="created_by_id", read_only=True)
+ dateAdded = serializers.DateTimeField(source="date_added", read_only=True)
+ dateUpdated = serializers.DateTimeField(source="date_updated", read_only=True)
+
+ class Meta:
+ model = TempestCredentials
+ fields = [
+ "id",
+ "clientId",
+ "clientSecret",
+ "message",
+ "messageType",
+ "latestFetchedItemId",
+ "createdById",
+ "dateAdded",
+ "dateUpdated",
+ ]
diff --git a/src/sentry/templates/sentry/base-react.html b/src/sentry/templates/sentry/base-react.html
index d8c0ffd686f3b1..ab3e218318e48a 100644
--- a/src/sentry/templates/sentry/base-react.html
+++ b/src/sentry/templates/sentry/base-react.html
@@ -18,3 +18,5 @@
{% endblock %}
+
+{% block wrapperclass %}{{ user_theme }}{% endblock %}
diff --git a/src/sentry/templates/sentry/bases/forceauth_modal.html b/src/sentry/templates/sentry/bases/forceauth_modal.html
deleted file mode 100644
index a9adbebfe622a8..00000000000000
--- a/src/sentry/templates/sentry/bases/forceauth_modal.html
+++ /dev/null
@@ -1,11 +0,0 @@
-{% extends "sentry/layout.html" %}
-
-{% load i18n %}
-
-{% block wrapperclass %}{{ block.super }} narrow hide-sidebar{% endblock %}
-
-{% block content %}
- Account : {{ request.user.get_display_name }}
-
- {% block main %}{% endblock %}
-{% endblock %}
diff --git a/src/sentry/templates/sentry/emails/mfa-too-many-attempts.html b/src/sentry/templates/sentry/emails/mfa-too-many-attempts.html
index a1b36df88d2900..8a17b70a2e02ea 100644
--- a/src/sentry/templates/sentry/emails/mfa-too-many-attempts.html
+++ b/src/sentry/templates/sentry/emails/mfa-too-many-attempts.html
@@ -13,4 +13,5 @@ Suspicious Activity Detected
Date: {{ datetime|date:"N j, Y, P e" }}
If you have lost your 2FA credentials, you can follow our account recovery steps here .
If these logins are not from you, we recommend you log in to your Sentry account and reset your password under your account security settings . On the same account security page, we also recommend you click the “Sign out of all devices” button to remove all currently logged-in sessions of your account.
+ If you are unable to log in to your Sentry account for the password reset, you can use Password Recovery .
{% endblock %}
diff --git a/src/sentry/templates/sentry/emails/mfa-too-many-attempts.txt b/src/sentry/templates/sentry/emails/mfa-too-many-attempts.txt
index 7ec831bc6473f2..192300b5ef7543 100644
--- a/src/sentry/templates/sentry/emails/mfa-too-many-attempts.txt
+++ b/src/sentry/templates/sentry/emails/mfa-too-many-attempts.txt
@@ -13,3 +13,6 @@ If these logins are not from you, we recommend you log in to your Sentry account
{{ url }}
On the same account security page, we also recommend you click the “Sign out of all devices” button to remove all currently logged-in sessions of your account.
+
+If you are unable to log in to your Sentry account for the password reset, you can use Password Recovery:
+{{ recover_url }}
diff --git a/src/sentry/templates/sentry/integrations/discord/linked.html b/src/sentry/templates/sentry/integrations/discord/linked.html
index 4ec0bd00bcd2d5..5a834df64dff19 100644
--- a/src/sentry/templates/sentry/integrations/discord/linked.html
+++ b/src/sentry/templates/sentry/integrations/discord/linked.html
@@ -1,14 +1,16 @@
{% extends "sentry/bases/modal.html" %}
-
{% load i18n %}
-
{% block title %}{% trans "Discord Linked" %} | {{ block.super }}{% endblock %}
{% block wrapperclass %}narrow auth{% endblock %}
-
{% block main %}
{% trans "Your Discord account has been associated with your Sentry account. You may now take Sentry actions through Discord." %}
+
+
+ Go back to Discord
+
+
{% endblock %}
diff --git a/src/sentry/templates/sentry/partial/alerts.html b/src/sentry/templates/sentry/partial/alerts.html
index 2cf4bf1be3ecee..7a41552430bb52 100644
--- a/src/sentry/templates/sentry/partial/alerts.html
+++ b/src/sentry/templates/sentry/partial/alerts.html
@@ -77,7 +77,7 @@
{% elif show_login_banner %}
- Join our workshop to for a live demo of Sentry's new AI tools on Dec. 10.  
RSVP
+ Join our live workshop: Smarter Tools and Best Practices for Mobile Debugging on Jan 14.  
RSVP
{% endif %}
diff --git a/src/sentry/templates/sentry/toolbar/iframe.html b/src/sentry/templates/sentry/toolbar/iframe.html
index f35185ab3311a1..a488d33d093668 100644
--- a/src/sentry/templates/sentry/toolbar/iframe.html
+++ b/src/sentry/templates/sentry/toolbar/iframe.html
@@ -1,11 +1,13 @@
{% comment %}
Template returned for requests to /iframe. The iframe serves as a proxy for Sentry API requests.
Required context variables:
-- referrer: string. HTTP header from the request object.
-- state: string. One of: `logged-out`, `missing-project`, `invalid-domain` or `success`.
+- referrer: string. HTTP header from the request object. May have trailing `/`.
+- state: string. One of: `logged-out`, `missing-project`, `invalid-domain` or `logged-in`.
- logging: any. If the value is truthy in JavaScript then debug logging will be enabled.
- organization_slug: string. The org named in the url params
- project_id_or_slug: string | int. The project named in the url params
+- organizationUrl: string. Result of generate_organization_url()
+- regionUrl: string. Result of generate_region_url()
{% endcomment %}
{% load sentry_helpers %}
{% load sentry_assets %}
@@ -24,6 +26,11 @@
const logging = '{{ logging|escapejs }}';
const organizationSlug = '{{ organization_slug|escapejs }}';
const projectIdOrSlug = '{{ project_id_or_slug|escapejs }}';
+ const organizationUrl = '{{ organization_url|escapejs }}';
+ const regionUrl = '{{ region_url|escapejs }}';
+
+ // Strip the trailing `/` from the url
+ const referrerOrigin = new URL(referrer).origin;
function log(...args) {
if (logging) {
@@ -31,145 +38,161 @@
}
}
- function requestAuthn(delay_ms) {
- const origin = window.location.origin.endsWith('.sentry.io')
- ? 'https://sentry.io'
- : window.location.origin;
-
- window.open(
- `${origin}/toolbar/${organizationSlug}/${projectIdOrSlug}/login-success/?delay=${delay_ms ?? '0'}`,
- 'sentry-toolbar-auth-popup',
- 'popup=true,innerWidth=800,innerHeight=550,noopener=false'
- );
- }
-
/**
- * This should only be called on pageload, which is when the server has
- * checked for auth, project validity, and domain config first.
- *
- * Also to be called when we clear auth tokens.
+ * This is called on pageload, and whenever login tokens are cleared.
+ * On pageload, the server has checked for auth, project validity, and
+ * domain config first, so we can trust a state that is elevated above logged-out
*/
- function sendStateMessage(state) {
- log('sendStateMessage(state)', { state });
- window.parent.postMessage({
- source: 'sentry-toolbar',
- message: state
- }, referrer);
+ function postStateMessage(state) {
+ log('parent.postMessage()', { state, referrerOrigin });
+ window.parent.postMessage({ source: 'sentry-toolbar', message: state }, referrerOrigin);
}
- function listenForLoginSuccess() {
- window.addEventListener('message', messageEvent => {
- if (messageEvent.origin !== document.location.origin || messageEvent.data.source !== 'sentry-toolbar') {
- return;
- }
-
- log('window.onMessage', messageEvent.data, messageEvent);
- if (messageEvent.data.message === 'did-login') {
- saveAccessToken(messageEvent.data);
- window.location.reload();
- }
- });
+ function handleLoginWindowMessage(messageEvent) {
+ handleWindowMessage(messageEvent, document.location.origin, loginWindowMessageDispatch);
}
- function getCookieValue(cookie) {
- return `${cookie}; domain=${window.location.hostname}; path=/; max-age=31536000; SameSite=none; partitioned; secure`;
+ function handleParentWindowMessage(messageEvent) {
+ handleWindowMessage(messageEvent, referrerOrigin, parentWindowMessageDispatch);
}
- function saveAccessToken(data) {
- log('saveAccessToken', data)
- if (data.cookie) {
- document.cookie = getCookieValue(data.cookie);
- log('Saved a cookie', document.cookie.indexOf(data.cookie) >= 0);
- }
- if (data.token) {
- localStorage.setItem('accessToken', data.token);
- log('Saved an accessToken to localStorage');
+ function handleWindowMessage(messageEvent, requiredOrigin, dispatch) {
+ const isValidOrigin = messageEvent.origin === requiredOrigin;
+ if (!isValidOrigin) {
+ return;
}
- if (!data.cookie && !data.token) {
- log('Unexpected: No access token found!');
+ log('window.onMessage', messageEvent);
+ const { message, source } = messageEvent.data;
+ if (source !== 'sentry-toolbar' || !message || !(Object.hasOwn(dispatch, message))) {
+ return;
}
+ dispatch[message].call(undefined, messageEvent.data);
}
- function clearAuthn() {
- document.cookie = getCookieValue(document.cookie.split('=').at(0) + '=');
- log('Cleared the current cookie');
- const accessToken = localStorage.removeItem('accessToken')
- log('Removed accessToken from localStorage');
-
- sendStateMessage('logged-out');
- }
-
- async function fetchProxy(url, init) {
- // If we have an accessToken lets use it. Otherwise we presume a cookie will be set.
- const accessToken = localStorage.getItem('accessToken');
- const bearer = accessToken ? { 'Authorization': `Bearer ${accessToken}` } : {};
-
- // If either of these is invalid, or both are missing, we will
- // forward the resulting 401 to the application, which will request
- // tokens be destroyed and reload the iframe in an unauth state.
- log('Has access info', { cookie: Boolean(document.cookie), accessToken: Boolean(accessToken) });
-
- const initWithCreds = {
- ...init,
- headers: { ...init.headers, ...bearer },
- credentials: 'same-origin',
- };
- log({ initWithCreds });
-
- const response = await fetch(url, initWithCreds);
- return {
- ok: response.ok,
- status: response.status,
- statusText: response.statusText,
- url: response.url,
- headers: Object.fromEntries(response.headers.entries()),
- text: await response.text(),
- };
- }
-
- function setupMessageChannel() {
+ function getMessagePort() {
log('setupMessageChannel()');
const { port1, port2 } = new MessageChannel();
- const messageDispatch = {
- 'log': log,
- 'request-authn': requestAuthn,
- 'clear-authn': clearAuthn,
- 'fetch': fetchProxy,
- };
-
- port1.addEventListener('message', messageEvent => {
+ const handlePortMessage = (messageEvent) => {
log('port.onMessage', messageEvent.data);
const { $id, message } = messageEvent.data;
- if (!$id) {
- return; // MessageEvent is malformed, missing $id
- }
-
- if (!message.$function || !(Object.hasOwn(messageDispatch, message.$function))) {
- return; // No-op without a $function to call
+ if (!$id || !message.$function || !(Object.hasOwn(postMessageDispatch, message.$function))) {
+ return;
}
- Promise.resolve(messageDispatch[message.$function]
+ Promise.resolve(postMessageDispatch[message.$function]
.apply(undefined, message.$args || []))
.then($result => port1.postMessage({ $id, $result }))
.catch(error => port1.postMessage({ $id, $error: error }));
- });
+ };
+
+ port1.addEventListener('message', handlePortMessage);
port1.start();
- window.parent.postMessage({
- source: 'sentry-toolbar',
- message: 'port-connect',
- }, referrer, [port2]);
+ return port2;
+ }
- log('Sent', { message: 'port-connect', referrer });
+ function getCookieValue(cookie, domain) {
+ return `${cookie}; domain=${domain}; path=/; max-age=31536000; SameSite=none; partitioned; secure`;
}
- log('Init', { referrer, state });
+ const loginWindowMessageDispatch = {
+ 'did-login': ({ cookie, token }) => {
+ if (cookie) {
+ document.cookie = getCookieValue(cookie, window.location.hostname);
+ log('Saved a cookie', document.cookie.indexOf(cookie) >= 0);
+ }
+ if (token) {
+ localStorage.setItem('accessToken', token);
+ log('Saved an accessToken to localStorage');
+ }
+ if (!cookie && !token) {
+ log('Unexpected: No access token found!');
+ }
+
+ postStateMessage('stale');
+ },
+ };
+
+ const parentWindowMessageDispatch = {
+ 'request-login': ({ delay_ms }) => {
+ const origin = window.location.origin.endsWith('.sentry.io')
+ ? 'https://sentry.io'
+ : window.location.origin;
+
+ window.open(
+ `${origin}/toolbar/${organizationSlug}/${projectIdOrSlug}/login-success/?delay=${delay_ms ?? '0'}`,
+ 'sentry-toolbar-auth-popup',
+ 'popup=true,innerWidth=800,innerHeight=550,noopener=false'
+ );
+ log('Opened /login-success/', { delay_ms });
+ },
+
+ 'request-logout': () => {
+ const cookie = document.cookie.split('=').at(0) + '=';
+ document.cookie = getCookieValue(cookie, window.location.hostname);
+ document.cookie = getCookieValue(cookie, regionUrl);
+ log('Cleared the current cookie');
+
+ const accessToken = localStorage.removeItem('accessToken')
+ log('Removed accessToken from localStorage');
+
+ postStateMessage('stale');
+ },
+ };
+
+ const postMessageDispatch = {
+ 'log': log,
+
+ 'fetch': async (path, init) => {
+ // If we have an accessToken lets use it. Otherwise we presume a cookie will be set.
+ const accessToken = localStorage.getItem('accessToken');
+ const bearer = accessToken ? { 'Authorization': `Bearer ${accessToken}` } : {};
+
+ // If either of these is invalid, or both are missing, we will
+ // forward the resulting 401 to the application, which will request
+ // tokens be destroyed and reload the iframe in an unauth state.
+ log('Has access info', { cookie: Boolean(document.cookie), accessToken: Boolean(accessToken) });
+
+ const url = new URL('/api/0' + path, organizationUrl);
+ const initWithCreds = {
+ ...init,
+ headers: { ...init.headers, ...bearer },
+ credentials: 'include',
+ };
+ const response = await fetch(url, initWithCreds);
+ return {
+ ok: response.ok,
+ status: response.status,
+ statusText: response.statusText,
+ url: response.url,
+ headers: Object.fromEntries(response.headers.entries()),
+ text: await response.text(),
+ };
+ },
+ };
+
+ log('Init', { referrerOrigin, state });
+
+ if (state === 'logged-out') {
+ const cookie = document.cookie.split('=').at(0) + '=';
+ document.cookie = getCookieValue(cookie, window.location.hostname);
+ document.cookie = getCookieValue(cookie, regionUrl);
+ }
- setupMessageChannel();
- listenForLoginSuccess();
- sendStateMessage(state);
+ window.addEventListener('message', handleLoginWindowMessage);
+ window.addEventListener('message', handleParentWindowMessage);
+ postStateMessage(state);
+
+ if (state === 'logged-in') {
+ const port = getMessagePort();
+ window.parent.postMessage({
+ source: 'sentry-toolbar',
+ message: 'port-connect',
+ }, referrerOrigin, [port]);
+ log('parent.postMessage()', { message: 'port-connect', referrerOrigin });
+ }
})();
{% endscript %}
diff --git a/src/sentry/testutils/asserts.py b/src/sentry/testutils/asserts.py
index 062fd52e90a127..59b8c64e243235 100644
--- a/src/sentry/testutils/asserts.py
+++ b/src/sentry/testutils/asserts.py
@@ -55,14 +55,6 @@ def assert_org_audit_log_exists(**kwargs):
assert org_audit_log_exists(**kwargs)
-def assert_org_audit_log_does_not_exist(**kwargs):
- assert not org_audit_log_exists(**kwargs)
-
-
-def delete_all_org_audit_logs():
- return AuditLogEntry.objects.all().delete()
-
-
"""
Helper functions to assert integration SLO metrics
"""
@@ -93,3 +85,12 @@ def assert_success_metric(mock_record):
call for call in mock_record.mock_calls if call.args[0] == EventLifecycleOutcome.SUCCESS
)
assert event_success
+
+
+def assert_slo_metric(
+ mock_record, event_outcome: EventLifecycleOutcome = EventLifecycleOutcome.SUCCESS
+):
+ assert len(mock_record.mock_calls) == 2
+ start, end = mock_record.mock_calls
+ assert start.args[0] == EventLifecycleOutcome.STARTED
+ assert end.args[0] == event_outcome
diff --git a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py
index a5404c1e9d82a7..dee1be41de057b 100644
--- a/src/sentry/testutils/cases.py
+++ b/src/sentry/testutils/cases.py
@@ -27,14 +27,13 @@
from django.contrib.auth.models import AnonymousUser
from django.core import signing
from django.core.cache import cache
-from django.db import DEFAULT_DB_ALIAS, connection, connections
+from django.db import connection, connections
from django.db.migrations.executor import MigrationExecutor
from django.http import HttpRequest
from django.test import RequestFactory
from django.test import TestCase as DjangoTestCase
from django.test import TransactionTestCase as DjangoTransactionTestCase
from django.test import override_settings
-from django.test.utils import CaptureQueriesContext
from django.urls import resolve, reverse
from django.utils import timezone
from django.utils.functional import cached_property
@@ -90,11 +89,7 @@
from sentry.models.commit import Commit
from sentry.models.commitauthor import CommitAuthor
from sentry.models.dashboard import Dashboard
-from sentry.models.dashboard_widget import (
- DashboardWidget,
- DashboardWidgetDisplayTypes,
- DashboardWidgetQuery,
-)
+from sentry.models.dashboard_widget import DashboardWidget, DashboardWidgetDisplayTypes
from sentry.models.deletedorganization import DeletedOrganization
from sentry.models.deploy import Deploy
from sentry.models.environment import Environment
@@ -118,7 +113,6 @@
from sentry.projects.project_rules.creator import ProjectRuleCreator
from sentry.replays.lib.event_linking import transform_event_for_linking_payload
from sentry.replays.models import ReplayRecordingSegment
-from sentry.rules.base import RuleBase
from sentry.search.events.constants import (
METRIC_FRUSTRATED_TAG_VALUE,
METRIC_SATISFACTION_TAG_KEY,
@@ -138,11 +132,10 @@
from sentry.snuba.metrics.naming_layer.public import TransactionMetricKey
from sentry.tagstore.snuba.backend import SnubaTagStorage
from sentry.testutils.factories import get_fixture_path
-from sentry.testutils.helpers.datetime import before_now, iso_format
+from sentry.testutils.helpers.datetime import before_now
from sentry.testutils.helpers.notifications import TEST_ISSUE_OCCURRENCE
from sentry.testutils.helpers.slack import install_slack
from sentry.testutils.pytest.selenium import Browser
-from sentry.types.condition_activity import ConditionActivity, ConditionActivityType
from sentry.users.models.identity import Identity, IdentityProvider, IdentityStatus
from sentry.users.models.user import User
from sentry.users.models.user_option import UserOption
@@ -168,7 +161,7 @@
from .asserts import assert_status_code
from .factories import Factories
from .fixtures import Fixtures
-from .helpers import AuthProvider, Feature, TaskRunner, override_options, parse_queries
+from .helpers import AuthProvider, Feature, TaskRunner, override_options
from .silo import assume_test_silo_mode
from .skips import requires_snuba
@@ -215,11 +208,6 @@
class BaseTestCase(Fixtures):
- def assertRequiresAuthentication(self, path, method="GET"):
- resp = getattr(self.client, method.lower())(path)
- assert resp.status_code == 302
- assert resp["Location"].startswith("http://testserver" + reverse("sentry-login"))
-
@pytest.fixture(autouse=True)
def setup_dummy_auth_provider(self):
auth.register("dummy", DummyProvider)
@@ -431,18 +419,6 @@ def assert_valid_deleted_log(self, deleted_log, original_object):
assert deleted_log.date_created == original_object.date_added
assert deleted_log.date_deleted >= deleted_log.date_created
- def assertWriteQueries(self, queries, debug=False, *args, **kwargs):
- func = kwargs.pop("func", None)
- using = kwargs.pop("using", DEFAULT_DB_ALIAS)
- conn = connections[using]
-
- context = _AssertQueriesContext(self, queries, debug, conn)
- if func is None:
- return context
-
- with context:
- func(*args, **kwargs)
-
def get_mock_uuid(self):
class uuid:
hex = "abc123"
@@ -451,52 +427,6 @@ class uuid:
return uuid
-class _AssertQueriesContext(CaptureQueriesContext):
- def __init__(self, test_case, queries, debug, connection):
- self.test_case = test_case
- self.queries = queries
- self.debug = debug
- super().__init__(connection)
-
- def __exit__(self, exc_type, exc_value, traceback):
- super().__exit__(exc_type, exc_value, traceback)
- if exc_type is not None:
- return
-
- parsed_queries = parse_queries(self.captured_queries)
-
- if self.debug:
- import pprint
-
- pprint.pprint("====================== Raw Queries ======================")
- pprint.pprint(self.captured_queries)
- pprint.pprint("====================== Table writes ======================")
- pprint.pprint(parsed_queries)
-
- for table, num in parsed_queries.items():
- expected = self.queries.get(table, 0)
- if expected == 0:
- import pprint
-
- pprint.pprint(
- "WARNING: no query against %s emitted, add debug=True to see all the queries"
- % (table)
- )
- else:
- self.test_case.assertTrue(
- num == expected,
- "%d write queries expected on `%s`, got %d, add debug=True to see all the queries"
- % (expected, table, num),
- )
-
- for table, num in self.queries.items():
- executed = parsed_queries.get(table, None)
- self.test_case.assertFalse(
- executed is None,
- "no query against %s emitted, add debug=True to see all the queries" % (table),
- )
-
-
@override_settings(ROOT_URLCONF="sentry.web.urls")
class TestCase(BaseTestCase, DjangoTestCase):
# We need Django to flush all databases.
@@ -952,24 +882,6 @@ def get_state(self, **kwargs):
kwargs.setdefault("has_escalated", False)
return EventState(**kwargs)
- def get_condition_activity(self, **kwargs) -> ConditionActivity:
- kwargs.setdefault("group_id", self.event.group.id)
- kwargs.setdefault("type", ConditionActivityType.CREATE_ISSUE)
- kwargs.setdefault("timestamp", self.event.datetime)
- return ConditionActivity(**kwargs)
-
- def passes_activity(
- self,
- rule: RuleBase,
- condition_activity: ConditionActivity | None = None,
- event_map: dict[str, Any] | None = None,
- ):
- if condition_activity is None:
- condition_activity = self.get_condition_activity()
- if event_map is None:
- event_map = {}
- return rule.passes_activity(condition_activity, event_map)
-
def assertPasses(self, rule, event=None, **kwargs):
if event is None:
event = self.event
@@ -1026,33 +938,9 @@ def assert_member_can_access(self, path, **kwargs):
def assert_manager_can_access(self, path, **kwargs):
return self.assert_role_can_access(path, "manager", **kwargs)
- def assert_teamless_member_can_access(self, path, **kwargs):
- user = self.create_user(is_superuser=False)
- self.create_member(user=user, organization=self.organization, role="member", teams=[])
-
- self.assert_can_access(user, path, **kwargs)
-
def assert_member_cannot_access(self, path, **kwargs):
return self.assert_role_cannot_access(path, "member", **kwargs)
- def assert_manager_cannot_access(self, path, **kwargs):
- return self.assert_role_cannot_access(path, "manager", **kwargs)
-
- def assert_teamless_member_cannot_access(self, path, **kwargs):
- user = self.create_user(is_superuser=False)
- self.create_member(user=user, organization=self.organization, role="member", teams=[])
-
- self.assert_cannot_access(user, path, **kwargs)
-
- def assert_team_admin_can_access(self, path, **kwargs):
- return self.assert_role_can_access(path, "admin", **kwargs)
-
- def assert_teamless_admin_can_access(self, path, **kwargs):
- user = self.create_user(is_superuser=False)
- self.create_member(user=user, organization=self.organization, role="admin", teams=[])
-
- self.assert_can_access(user, path, **kwargs)
-
def assert_team_admin_cannot_access(self, path, **kwargs):
return self.assert_role_cannot_access(path, "admin", **kwargs)
@@ -1062,19 +950,9 @@ def assert_teamless_admin_cannot_access(self, path, **kwargs):
self.assert_cannot_access(user, path, **kwargs)
- def assert_team_owner_can_access(self, path, **kwargs):
- return self.assert_role_can_access(path, "owner", **kwargs)
-
def assert_owner_can_access(self, path, **kwargs):
return self.assert_role_can_access(path, "owner", **kwargs)
- def assert_owner_cannot_access(self, path, **kwargs):
- return self.assert_role_cannot_access(path, "owner", **kwargs)
-
- def assert_non_member_cannot_access(self, path, **kwargs):
- user = self.create_user(is_superuser=False)
- self.assert_cannot_access(user, path, **kwargs)
-
def assert_role_can_access(self, path, role, **kwargs):
user = self.create_user(is_superuser=False)
self.create_member(user=user, organization=self.organization, role=role, teams=[self.team])
@@ -1253,20 +1131,6 @@ def setUp(self):
def initialize(self, reset_snuba, call_snuba):
self.call_snuba = call_snuba
- @contextmanager
- def disable_snuba_query_cache(self):
- self.snuba_update_config({"use_readthrough_query_cache": 0, "use_cache": 0})
- yield
- self.snuba_update_config({"use_readthrough_query_cache": None, "use_cache": None})
-
- @classmethod
- def snuba_get_config(cls):
- return _snuba_pool.request("GET", "/config.json").data
-
- @classmethod
- def snuba_update_config(cls, config_vals):
- return _snuba_pool.request("POST", "/config.json", body=json.dumps(config_vals))
-
def create_project(self, **kwargs) -> Project:
if "flags" not in kwargs:
# We insert events directly into snuba in tests, so we need to set has_transactions to True so the
@@ -1368,16 +1232,6 @@ def store_group(self, group):
== 200
)
- def store_outcome(self, group):
- data = [self.__wrap_group(group)]
- assert (
- requests.post(
- settings.SENTRY_SNUBA + "/tests/entities/outcomes/insert",
- data=json.dumps(data),
- ).status_code
- == 200
- )
-
def store_span(self, span, is_eap=False):
span["ingest_in_eap"] = is_eap
assert (
@@ -2272,7 +2126,7 @@ def create_event(self, timestamp, fingerprint=None, user=None):
data = {
"event_id": event_id,
"fingerprint": [fingerprint],
- "timestamp": iso_format(timestamp),
+ "timestamp": timestamp.isoformat(),
"type": "error",
# This is necessary because event type error should not exist without
# an exception being in the payload
@@ -2677,32 +2531,6 @@ def do_request(self, method, url, data=None):
func = getattr(self.client, method)
return func(url, data=data)
- def assert_widget_queries(self, widget_id, data):
- result_queries = DashboardWidgetQuery.objects.filter(widget_id=widget_id).order_by("order")
- for ds, expected_ds in zip(result_queries, data):
- assert ds.name == expected_ds["name"]
- assert ds.fields == expected_ds["fields"]
- assert ds.conditions == expected_ds["conditions"]
-
- def assert_widget(self, widget, order, title, display_type, queries=None):
- assert widget.order == order
- assert widget.display_type == display_type
- assert widget.title == title
-
- if not queries:
- return
-
- self.assert_widget_queries(widget.id, queries)
-
- def assert_widget_data(self, data, title, display_type, queries=None):
- assert data["displayType"] == display_type
- assert data["title"] == title
-
- if not queries:
- return
-
- self.assert_widget_queries(data["id"], queries)
-
def assert_serialized_widget_query(self, data, widget_data_source):
if "id" in data:
assert data["id"] == str(widget_data_source.id)
@@ -2968,50 +2796,6 @@ def mock_chat_postMessage(self):
) as self.mock_post:
yield
- def assert_performance_issue_attachments(
- self, attachment, project_slug, referrer, alert_type="workflow"
- ):
- assert "N+1 Query" in attachment["text"]
- assert (
- "db - SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21"
- in attachment["blocks"][1]["text"]["text"]
- )
- title_link = attachment["blocks"][0]["text"]["text"][13:][1:-1]
- notification_uuid = self.get_notification_uuid(title_link)
- assert (
- attachment["blocks"][-2]["elements"][0]["text"]
- == f"{project_slug} | production | "
- )
-
- def assert_performance_issue_blocks(
- self,
- blocks,
- org: Organization,
- project_slug: str,
- group,
- referrer,
- alert_type: FineTuningAPIKey = FineTuningAPIKey.WORKFLOW,
- issue_link_extra_params=None,
- ):
- notification_uuid = self.get_notification_uuid(blocks[1]["text"]["text"])
- issue_link = f"http://testserver/organizations/{org.slug}/issues/{group.id}/?referrer={referrer}¬ification_uuid={notification_uuid}"
- if issue_link_extra_params is not None:
- issue_link += issue_link_extra_params
- assert (
- blocks[1]["text"]["text"]
- == f":large_blue_circle: :chart_with_upwards_trend: <{issue_link}|*N+1 Query*>"
- )
- assert (
- blocks[2]["text"]["text"]
- == "```db - SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21```"
- )
- assert blocks[3]["elements"][0]["text"] == "State: *New* First Seen: *10\xa0minutes ago*"
- optional_org_id = f"&organizationId={org.id}" if alert_page_needs_org_id(alert_type) else ""
- assert (
- blocks[4]["elements"][0]["text"]
- == f"{project_slug} | production | "
- )
-
def assert_performance_issue_blocks_with_culprit_blocks(
self,
blocks,
@@ -3042,17 +2826,6 @@ def assert_performance_issue_blocks_with_culprit_blocks(
== f"{project_slug} | production | "
)
- def assert_generic_issue_attachments(
- self, attachment, project_slug, referrer, alert_type="workflow"
- ):
- assert attachment["title"] == TEST_ISSUE_OCCURRENCE.issue_title
- assert attachment["text"] == TEST_ISSUE_OCCURRENCE.evidence_display[0].value
- notification_uuid = self.get_notification_uuid(attachment["title_link"])
- assert (
- attachment["footer"]
- == f"{project_slug} | "
- )
-
def assert_generic_issue_blocks(
self,
blocks,
@@ -3615,7 +3388,7 @@ def load_default(self) -> Event:
start, _ = self.get_start_end_from_day_ago(1000)
return self.store_event(
{
- "timestamp": iso_format(start),
+ "timestamp": start.isoformat(),
"contexts": {
"trace": {
"type": "trace",
diff --git a/src/sentry/testutils/factories.py b/src/sentry/testutils/factories.py
index 96f402a651f2cc..96f198c52f59fa 100644
--- a/src/sentry/testutils/factories.py
+++ b/src/sentry/testutils/factories.py
@@ -146,6 +146,8 @@
from sentry.silo.base import SiloMode
from sentry.snuba.dataset import Dataset
from sentry.snuba.models import QuerySubscription, QuerySubscriptionDataSourceHandler
+from sentry.tempest.models import MessageType as TempestMessageType
+from sentry.tempest.models import TempestCredentials
from sentry.testutils.outbox import outbox_runner
from sentry.testutils.silo import assume_test_silo_mode
from sentry.types.activity import ActivityType
@@ -158,6 +160,7 @@
ProjectUptimeSubscriptionMode,
UptimeStatus,
UptimeSubscription,
+ UptimeSubscriptionRegion,
)
from sentry.users.models.identity import Identity, IdentityProvider, IdentityStatus
from sentry.users.models.user import User
@@ -604,6 +607,34 @@ def create_slack_project_rule(project, integration_id, channel_id=None, channel_
def create_project_key(project):
return project.key_set.get_or_create()[0]
+ @staticmethod
+ @assume_test_silo_mode(SiloMode.REGION)
+ def create_tempest_credentials(
+ project: Project,
+ created_by: User | None = None,
+ client_id: str | None = None,
+ client_secret: str | None = None,
+ message: str = "",
+ message_type: str | None = None,
+ latest_fetched_item_id: str | None = None,
+ ):
+ if client_id is None:
+ client_id = str(uuid4())
+ if client_secret is None:
+ client_secret = str(uuid4())
+ if message_type is None:
+ message_type = TempestMessageType.ERROR
+
+ return TempestCredentials.objects.create(
+ project=project,
+ created_by_id=created_by.id if created_by else None,
+ client_id=client_id,
+ client_secret=client_secret,
+ message=message,
+ message_type=message_type,
+ latest_fetched_item_id=latest_fetched_item_id,
+ )
+
@staticmethod
@assume_test_silo_mode(SiloMode.REGION)
def create_release(
@@ -1003,6 +1034,9 @@ def store_event(
@staticmethod
@assume_test_silo_mode(SiloMode.REGION)
def create_group(project, **kwargs):
+ from sentry.models.group import GroupStatus
+ from sentry.types.group import GroupSubStatus
+
kwargs.setdefault("message", "Hello world")
kwargs.setdefault("data", {})
if "type" not in kwargs["data"]:
@@ -1012,6 +1046,10 @@ def create_group(project, **kwargs):
if "metadata" in kwargs:
metadata = kwargs.pop("metadata")
kwargs["data"].setdefault("metadata", {}).update(metadata)
+ if "status" not in kwargs:
+ kwargs["status"] = GroupStatus.UNRESOLVED
+ kwargs["substatus"] = GroupSubStatus.NEW
+
return Group.objects.create(project=project, **kwargs)
@staticmethod
@@ -1945,7 +1983,7 @@ def create_uptime_subscription(
type: str,
subscription_id: str | None,
status: UptimeSubscription.Status,
- url: str,
+ url: str | None,
url_domain: str,
url_domain_suffix: str,
host_provider_id: str,
@@ -1957,6 +1995,10 @@ def create_uptime_subscription(
date_updated: datetime,
trace_sampling: bool = False,
):
+ if url is None:
+ url = petname.generate().title()
+ url = f"http://{url}.com"
+
return UptimeSubscription.objects.create(
type=type,
subscription_id=subscription_id,
@@ -1980,10 +2022,12 @@ def create_project_uptime_subscription(
env: Environment | None,
uptime_subscription: UptimeSubscription,
mode: ProjectUptimeSubscriptionMode,
- name: str,
+ name: str | None,
owner: Actor | None,
uptime_status: UptimeStatus,
):
+ if name is None:
+ name = petname.generate().title()
owner_team_id = None
owner_user_id = None
if owner:
@@ -2003,6 +2047,14 @@ def create_project_uptime_subscription(
uptime_status=uptime_status,
)
+ @staticmethod
+ def create_uptime_subscription_region(
+ subscription: UptimeSubscription, region_slug: str
+ ) -> UptimeSubscriptionRegion:
+ return UptimeSubscriptionRegion.objects.create(
+ uptime_subscription=subscription, region_slug=region_slug
+ )
+
@staticmethod
@assume_test_silo_mode(SiloMode.REGION)
def create_dashboard(
@@ -2061,13 +2113,18 @@ def create_dashboard_widget_query(
def create_workflow(
name: str | None = None,
organization: Organization | None = None,
+ config: dict[str, Any] | None = None,
**kwargs,
) -> Workflow:
if organization is None:
organization = Factories.create_organization()
if name is None:
name = petname.generate(2, " ", letters=10).title()
- return Workflow.objects.create(organization=organization, name=name, **kwargs)
+ if config is None:
+ config = {}
+ return Workflow.objects.create(
+ organization=organization, name=name, config=config, **kwargs
+ )
@staticmethod
@assume_test_silo_mode(SiloMode.REGION)
diff --git a/src/sentry/testutils/fixtures.py b/src/sentry/testutils/fixtures.py
index bff1a03aa1ec3b..6c6b2c535c750a 100644
--- a/src/sentry/testutils/fixtures.py
+++ b/src/sentry/testutils/fixtures.py
@@ -12,6 +12,7 @@
from sentry.incidents.models.alert_rule import AlertRule, AlertRuleMonitorTypeInt
from sentry.integrations.models.integration import Integration
from sentry.integrations.models.organization_integration import OrganizationIntegration
+from sentry.issues.grouptype import ErrorGroupType
from sentry.models.activity import Activity
from sentry.models.environment import Environment
from sentry.models.grouprelease import GroupRelease
@@ -26,8 +27,9 @@
from sentry.organizations.services.organization import RpcOrganization
from sentry.silo.base import SiloMode
from sentry.snuba.models import QuerySubscription
+from sentry.tempest.models import TempestCredentials
from sentry.testutils.factories import Factories
-from sentry.testutils.helpers.datetime import before_now, iso_format
+from sentry.testutils.helpers.datetime import before_now
from sentry.testutils.silo import assume_test_silo_mode
# XXX(dcramer): this is a compatibility layer to transition to pytest-based fixtures
@@ -45,6 +47,7 @@
from sentry.users.models.user import User
from sentry.users.services.user import RpcUser
from sentry.workflow_engine.models import DataSource, Detector, DetectorState, Workflow
+from sentry.workflow_engine.models.data_condition import Condition
from sentry.workflow_engine.types import DetectorPriorityLevel
@@ -101,7 +104,7 @@ def event(self):
data={
"event_id": "a" * 32,
"message": "\u3053\u3093\u306b\u3061\u306f",
- "timestamp": iso_format(before_now(seconds=1)),
+ "timestamp": before_now(seconds=1).isoformat(),
},
project_id=self.project.id,
)
@@ -126,7 +129,7 @@ def integration(self):
external_id="github:1",
metadata={
"access_token": "xxxxx-xxxxxxxxx-xxxxxxxxxx-xxxxxxxxxxxx",
- "expires_at": iso_format(timezone.now() + timedelta(days=14)),
+ "expires_at": (timezone.now() + timedelta(days=14)).isoformat(),
},
)
integration.add_organization(self.organization, self.user)
@@ -279,6 +282,9 @@ def create_usersocialauth(
def store_event(self, *args, **kwargs) -> Event:
return Factories.store_event(*args, **kwargs)
+ def create_tempest_credentials(self, project: Project, *args, **kwargs) -> TempestCredentials:
+ return Factories.create_tempest_credentials(project, *args, **kwargs)
+
def create_group(self, project=None, *args, **kwargs):
if project is None:
project = self.project
@@ -610,9 +616,8 @@ def create_data_source(self, *args, **kwargs) -> DataSource:
def create_data_condition(
self,
- condition="eq",
comparison="10",
- type="",
+ type=Condition.EQUAL,
condition_result=None,
condition_group=None,
**kwargs,
@@ -623,7 +628,6 @@ def create_data_condition(
condition_group = self.create_data_condition_group()
return Factories.create_data_condition(
- condition=condition,
comparison=comparison,
type=type,
condition_result=condition_result,
@@ -635,12 +639,13 @@ def create_detector(
self,
*args,
project=None,
+ type=ErrorGroupType.slug,
**kwargs,
) -> Detector:
if project is None:
project = self.create_project(organization=self.organization)
- return Factories.create_detector(*args, project=project, **kwargs)
+ return Factories.create_detector(*args, project=project, type=type, **kwargs)
def create_detector_state(self, *args, **kwargs) -> DetectorState:
return Factories.create_detector_state(*args, **kwargs)
@@ -672,7 +677,7 @@ def create_uptime_subscription(
type: str = "test",
subscription_id: str | None = None,
status: UptimeSubscription.Status = UptimeSubscription.Status.ACTIVE,
- url="http://sentry.io/",
+ url: str | None = None,
host_provider_id="TEST",
url_domain="sentry",
url_domain_suffix="io",
@@ -683,13 +688,16 @@ def create_uptime_subscription(
body=None,
date_updated: None | datetime = None,
trace_sampling: bool = False,
+ region_slugs: list[str] | None = None,
) -> UptimeSubscription:
if date_updated is None:
date_updated = timezone.now()
if headers is None:
headers = []
+ if region_slugs is None:
+ region_slugs = []
- return Factories.create_uptime_subscription(
+ subscription = Factories.create_uptime_subscription(
type=type,
subscription_id=subscription_id,
status=status,
@@ -705,6 +713,10 @@ def create_uptime_subscription(
body=body,
trace_sampling=trace_sampling,
)
+ for region_slug in region_slugs:
+ Factories.create_uptime_subscription_region(subscription, region_slug)
+
+ return subscription
def create_project_uptime_subscription(
self,
@@ -712,7 +724,7 @@ def create_project_uptime_subscription(
env: Environment | None = None,
uptime_subscription: UptimeSubscription | None = None,
mode=ProjectUptimeSubscriptionMode.AUTO_DETECTED_ACTIVE,
- name="Test Name",
+ name: str | None = None,
owner: User | Team | None = None,
uptime_status=UptimeStatus.OK,
) -> ProjectUptimeSubscription:
diff --git a/src/sentry/testutils/helpers/__init__.py b/src/sentry/testutils/helpers/__init__.py
index 215689798ba602..1e69c4a8f87230 100644
--- a/src/sentry/testutils/helpers/__init__.py
+++ b/src/sentry/testutils/helpers/__init__.py
@@ -3,7 +3,6 @@
from .features import * # NOQA
from .link_header import * # NOQA
from .options import * # NOQA
-from .query import * # NOQA
from .slack import * # NOQA
from .socket import * # NOQA
from .task_runner import * # NOQA
diff --git a/src/sentry/testutils/helpers/backups.py b/src/sentry/testutils/helpers/backups.py
index 76c8c2f48a1585..504ea3bf0a1634 100644
--- a/src/sentry/testutils/helpers/backups.py
+++ b/src/sentry/testutils/helpers/backups.py
@@ -101,6 +101,7 @@
from sentry.sentry_apps.models.sentry_app import SentryApp
from sentry.silo.base import SiloMode
from sentry.silo.safety import unguarded_write
+from sentry.tempest.models import TempestCredentials
from sentry.testutils.cases import TestCase, TransactionTestCase
from sentry.testutils.factories import get_fixture_path
from sentry.testutils.fixtures import Fixtures
@@ -667,18 +668,15 @@ def create_exhaustive_organization(
organization=org,
)
- send_notification_action = self.create_action(type=Action.Type.NOTIFICATION, data="")
+ send_notification_action = self.create_action(type=Action.Type.SLACK, data="")
self.create_data_condition_group_action(
action=send_notification_action,
condition_group=notification_condition_group,
)
- # TODO @saponifi3d: Update comparison to be DetectorState.Critical
data_condition = self.create_data_condition(
- condition="eq",
- comparison="critical",
- type="WorkflowCondition",
- condition_result="True",
+ comparison=75,
+ condition_result=True,
condition_group=notification_condition_group,
)
@@ -694,16 +692,13 @@ def create_exhaustive_organization(
organization=org,
)
- # TODO @saponifi3d: Create or define trigger workflow action type
trigger_workflows_action = self.create_action(type=Action.Type.WEBHOOK, data="")
self.create_data_condition_group_action(
action=trigger_workflows_action, condition_group=detector_conditions
)
self.create_data_condition(
- condition="eq",
- comparison="critical",
- type="DetectorCondition",
- condition_result="True",
+ comparison=75,
+ condition_result=True,
condition_group=detector_conditions,
)
detector.workflow_condition_group = detector_conditions
@@ -714,6 +709,15 @@ def create_exhaustive_organization(
alert_rule_trigger=trigger, data_condition=data_condition
)
+ TempestCredentials.objects.create(
+ project=project,
+ created_by_id=owner_id,
+ client_id="test_client_id",
+ client_secret="test_client_secret",
+ message="test_message",
+ latest_fetched_item_id="test_latest_fetched_item_id",
+ )
+
return org
@assume_test_silo_mode(SiloMode.CONTROL)
diff --git a/src/sentry/testutils/helpers/datetime.py b/src/sentry/testutils/helpers/datetime.py
index da1af95d8c5c51..206634b893e024 100644
--- a/src/sentry/testutils/helpers/datetime.py
+++ b/src/sentry/testutils/helpers/datetime.py
@@ -1,15 +1,10 @@
from __future__ import annotations
-import time
from datetime import UTC, datetime, timedelta
import time_machine
-__all__ = ["iso_format", "before_now", "timestamp_format"]
-
-
-def iso_format(date: datetime) -> str:
- return date.isoformat()[:19]
+__all__ = ["before_now"]
def before_now(**kwargs: float) -> datetime:
@@ -17,10 +12,6 @@ def before_now(**kwargs: float) -> datetime:
return date - timedelta(microseconds=date.microsecond % 1000)
-def timestamp_format(datetime):
- return time.mktime(datetime.utctimetuple()) + datetime.microsecond / 1e6
-
-
class MockClock:
"""Returns a distinct, increasing timestamp each time it is called."""
diff --git a/src/sentry/testutils/helpers/query.py b/src/sentry/testutils/helpers/query.py
deleted file mode 100644
index d9e8e9759e3ebb..00000000000000
--- a/src/sentry/testutils/helpers/query.py
+++ /dev/null
@@ -1,30 +0,0 @@
-from __future__ import annotations
-
-from typing import Any
-
-import sqlparse
-from sqlparse.tokens import DML
-
-__all__ = ("parse_queries",)
-
-
-def parse_queries(captured_queries: list[dict[str, Any]]) -> dict[str, int]:
- write_ops = ["INSERT", "UPDATE", "DELETE"]
-
- real_queries: dict[str, int] = {}
-
- for query in captured_queries:
- raw_sql = query["sql"]
- parsed = sqlparse.parse(raw_sql)
- for token_index, token in enumerate(parsed[0].tokens):
- if token.ttype is DML:
- if token.value.upper() in write_ops:
- for t in parsed[0].tokens[token_index + 1 :]:
- if isinstance(t, sqlparse.sql.Identifier):
- table_name = t.get_real_name()
- if real_queries.get(table_name) is None:
- real_queries[table_name] = 0
- real_queries[table_name] += 1
- break
-
- return real_queries
diff --git a/src/sentry/testutils/helpers/redis.py b/src/sentry/testutils/helpers/redis.py
index dd41b06dfd4e67..9e4bdc0303c5be 100644
--- a/src/sentry/testutils/helpers/redis.py
+++ b/src/sentry/testutils/helpers/redis.py
@@ -9,11 +9,8 @@
from sentry.testutils.helpers import override_options
-@contextmanager
def mock_redis_buffer():
- buffer = RedisBuffer()
- with patch("sentry.buffer.backend", new=buffer):
- yield buffer
+ return patch("sentry.buffer.backend", new=RedisBuffer())
@contextmanager
diff --git a/src/sentry/testutils/helpers/slack.py b/src/sentry/testutils/helpers/slack.py
index 000a0155d50827..bb5d3458c242c0 100644
--- a/src/sentry/testutils/helpers/slack.py
+++ b/src/sentry/testutils/helpers/slack.py
@@ -57,17 +57,6 @@ def add_identity(
return idp
-def find_identity(idp: IdentityProvider, user: User) -> Identity | None:
- identities = Identity.objects.filter(
- idp=idp,
- user=user,
- status=IdentityStatus.VALID,
- )
- if not identities:
- return None
- return identities[0]
-
-
@assume_test_silo_mode(SiloMode.CONTROL)
def link_user(user: User, idp: IdentityProvider, slack_id: str) -> None:
Identity.objects.create(
diff --git a/src/sentry/testutils/pytest/relay.py b/src/sentry/testutils/pytest/relay.py
index 76dff65c2da813..5bff85dcf2cd53 100644
--- a/src/sentry/testutils/pytest/relay.py
+++ b/src/sentry/testutils/pytest/relay.py
@@ -69,6 +69,7 @@ def relay_server_setup(live_server, tmpdir_factory):
relay_port = 33331
redis_db = TEST_REDIS_DB
+ use_new_dev_services = environ.get("USE_NEW_DEVSERVICES", "0") == "1"
from sentry.relay import projectconfig_cache
from sentry.relay.projectconfig_cache.redis import RedisProjectConfigCache
@@ -80,8 +81,8 @@ def relay_server_setup(live_server, tmpdir_factory):
template_vars = {
"SENTRY_HOST": f"http://host.docker.internal:{port}/",
"RELAY_PORT": relay_port,
- "KAFKA_HOST": "sentry_kafka",
- "REDIS_HOST": "sentry_redis",
+ "KAFKA_HOST": "kafka-kafka-1" if use_new_dev_services else "sentry_kafka",
+ "REDIS_HOST": "redis-redis-1" if use_new_dev_services else "sentry_redis",
"REDIS_DB": redis_db,
}
@@ -106,7 +107,7 @@ def relay_server_setup(live_server, tmpdir_factory):
options = {
"image": RELAY_TEST_IMAGE,
"ports": {"%s/tcp" % relay_port: relay_port},
- "network": "sentry",
+ "network": "devservices" if use_new_dev_services else "sentry",
"detach": True,
"name": container_name,
"volumes": {config_path: {"bind": "/etc/relay"}},
diff --git a/src/sentry/testutils/relay.py b/src/sentry/testutils/relay.py
index 793e69a9207d80..25232a63af448e 100644
--- a/src/sentry/testutils/relay.py
+++ b/src/sentry/testutils/relay.py
@@ -35,9 +35,6 @@ class RelayStoreHelper(RequiredBaseclass):
get_relay_minidump_url: Any
get_relay_unreal_url: Any
- def use_relay(self):
- return True
-
def post_and_retrieve_event(self, data):
url = self.get_relay_store_url(self.project.id)
responses.add_passthru(url)
diff --git a/src/sentry/testutils/requests.py b/src/sentry/testutils/requests.py
index a7adf1f9ce4b0e..4ce41b0355d860 100644
--- a/src/sentry/testutils/requests.py
+++ b/src/sentry/testutils/requests.py
@@ -7,6 +7,8 @@
from django.contrib.auth.models import AnonymousUser
from django.core.cache import cache
from django.http import HttpRequest
+from rest_framework.request import Request
+from rest_framework.views import APIView
from sentry.app import env
from sentry.middleware.auth import AuthenticationMiddleware
@@ -67,3 +69,13 @@ def make_user_request_from_org(org=None):
request, user = make_user_request(org)
request.session["activeorg"] = org.slug
return request, user
+
+
+def drf_request_from_request(request: HttpRequest) -> Request:
+ ret = APIView().initialize_request(request)
+ # reattach these if missing
+ # XXX: technically `HttpRequest` shouldn't have auth but our tests do!)
+ for attr in ("auth", "user"):
+ if hasattr(request, attr):
+ setattr(ret, attr, getattr(request, attr))
+ return ret
diff --git a/src/sentry/testutils/skips.py b/src/sentry/testutils/skips.py
index 6937880a079417..d77514e19231e9 100644
--- a/src/sentry/testutils/skips.py
+++ b/src/sentry/testutils/skips.py
@@ -1,50 +1,11 @@
from __future__ import annotations
-import os
import socket
-from collections.abc import Callable
-from typing import Any, TypeVar
from urllib.parse import urlparse
import pytest
from django.conf import settings
-T = TypeVar("T", bound=Callable[..., Any])
-
-
-def is_arm64() -> bool:
- return os.uname().machine == "arm64"
-
-
-requires_not_arm64 = pytest.mark.skipif(
- is_arm64(), reason="this test fails in our arm64 testing env"
-)
-
-
-def xfail_if_not_postgres(reason: str) -> Callable[[T], T]:
- def decorator(function: T) -> T:
- return pytest.mark.xfail(os.environ.get("TEST_SUITE") != "postgres", reason=reason)(
- function
- )
-
- return decorator
-
-
-def skip_for_relay_store(reason: str) -> Callable[[T], T]:
- """
- Decorator factory will skip marked tests if Relay is enabled.
- A test decorated with @skip_for_relay_store("this test has been moved in relay")
- Will not be executed when the settings SENTRY_USE_RELAY = True
- :param reason: the reason the test should be skipped
-
- Note: Eventually, when Relay becomes compulsory, tests marked with this decorator will be deleted.
- """
-
- def decorator(function: T) -> T:
- return pytest.mark.skipif(settings.SENTRY_USE_RELAY, reason=reason)(function)
-
- return decorator
-
def _service_available(host: str, port: int) -> bool:
try:
diff --git a/src/sentry/toolbar/views/iframe_view.py b/src/sentry/toolbar/views/iframe_view.py
index 58abcb5439cff1..a73af6dfb9a90d 100644
--- a/src/sentry/toolbar/views/iframe_view.py
+++ b/src/sentry/toolbar/views/iframe_view.py
@@ -3,8 +3,10 @@
from django.http import HttpRequest, HttpResponse
from django.http.response import HttpResponseBase
+from sentry.api.utils import generate_region_url
from sentry.models.organization import Organization
from sentry.models.project import Project
+from sentry.organizations.absolute_url import generate_organization_url
from sentry.toolbar.utils.url import is_origin_allowed
from sentry.web.frontend.base import ProjectView, region_silo_view
@@ -61,6 +63,8 @@ def _respond_with_state(self, state: str):
"logging": self.request.GET.get("logging", ""),
"organization_slug": self.organization_slug,
"project_id_or_slug": self.project_id_or_slug,
+ "organization_url": generate_organization_url(self.organization_slug),
+ "region_url": generate_region_url(),
},
)
diff --git a/src/sentry/types/activity.py b/src/sentry/types/activity.py
index e850e61e854b47..1dd4a3c2373ad7 100644
--- a/src/sentry/types/activity.py
+++ b/src/sentry/types/activity.py
@@ -67,3 +67,16 @@ class ActivityType(Enum):
ActivityType.DELETED_ATTACHMENT, # 27
]
)
+
+
+STATUS_CHANGE_ACTIVITY_TYPES = (
+ ActivityType.SET_RESOLVED,
+ ActivityType.SET_UNRESOLVED,
+ ActivityType.SET_IGNORED,
+ ActivityType.SET_REGRESSION,
+ ActivityType.SET_RESOLVED_IN_RELEASE,
+ ActivityType.SET_RESOLVED_BY_AGE,
+ ActivityType.SET_RESOLVED_IN_COMMIT,
+ ActivityType.SET_RESOLVED_IN_PULL_REQUEST,
+ ActivityType.SET_ESCALATING,
+)
diff --git a/src/sentry/uptime/config_producer.py b/src/sentry/uptime/config_producer.py
index 90ea85cc65e486..dbf486fa188cf1 100644
--- a/src/sentry/uptime/config_producer.py
+++ b/src/sentry/uptime/config_producer.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import logging
from uuid import UUID
from arroyo import Topic as ArroyoTopic
@@ -8,9 +9,12 @@
from sentry_kafka_schemas.schema_types.uptime_configs_v1 import CheckConfig
from sentry.conf.types.kafka_definition import Topic, get_topic_codec
+from sentry.uptime.subscriptions.regions import get_region_config
from sentry.utils.arroyo_producer import SingletonProducer
from sentry.utils.kafka_config import get_kafka_producer_cluster_options, get_topic_definition
+logger = logging.getLogger(__name__)
+
UPTIME_CONFIGS_CODEC: Codec[CheckConfig] = get_topic_codec(Topic.UPTIME_CONFIGS)
@@ -25,16 +29,30 @@ def _get_producer() -> KafkaProducer:
_configs_producer = SingletonProducer(_get_producer)
-def produce_config(config: CheckConfig):
- _produce_to_kafka(UUID(config["subscription_id"]), UPTIME_CONFIGS_CODEC.encode(config))
+def produce_config(destination_region_slug: str, config: CheckConfig):
+ _produce_to_kafka(
+ destination_region_slug,
+ UUID(config["subscription_id"]),
+ UPTIME_CONFIGS_CODEC.encode(config),
+ )
+
+def produce_config_removal(destination_region_slug: str, subscription_id: str):
+ _produce_to_kafka(destination_region_slug, UUID(subscription_id), None)
-def produce_config_removal(subscription_id: str):
- _produce_to_kafka(UUID(subscription_id), None)
+def _produce_to_kafka(
+ destination_region_slug: str, subscription_id: UUID, value: bytes | None
+) -> None:
+ region_config = get_region_config(destination_region_slug)
+ if region_config is None:
+ logger.error(
+ "Attempted to create uptime subscription with invalid region slug",
+ extra={"region_slug": destination_region_slug, "subscription_id": subscription_id},
+ )
+ return
-def _produce_to_kafka(subscription_id: UUID, value: bytes | None) -> None:
- topic = get_topic_definition(Topic.UPTIME_CONFIGS)["real_topic_name"]
+ topic = get_topic_definition(region_config.config_topic)["real_topic_name"]
payload = KafkaPayload(
subscription_id.bytes,
# Typically None is not allowed for the arroyo payload, but in this
diff --git a/src/sentry/uptime/consumers/results_consumer.py b/src/sentry/uptime/consumers/results_consumer.py
index 7af6259ddacd85..87be1361ae67cb 100644
--- a/src/sentry/uptime/consumers/results_consumer.py
+++ b/src/sentry/uptime/consumers/results_consumer.py
@@ -1,6 +1,7 @@
from __future__ import annotations
import logging
+import random
from datetime import datetime, timedelta, timezone
from sentry_kafka_schemas.schema_types.uptime_results_v1 import (
@@ -24,13 +25,18 @@
ProjectUptimeSubscriptionMode,
UptimeStatus,
UptimeSubscription,
+ UptimeSubscriptionRegion,
)
+from sentry.uptime.subscriptions.regions import get_active_region_configs
from sentry.uptime.subscriptions.subscriptions import (
delete_uptime_subscriptions_for_project,
get_or_create_uptime_subscription,
remove_uptime_subscription_if_unused,
)
-from sentry.uptime.subscriptions.tasks import send_uptime_config_deletion
+from sentry.uptime.subscriptions.tasks import (
+ send_uptime_config_deletion,
+ update_remote_uptime_subscription,
+)
from sentry.utils import metrics
logger = logging.getLogger(__name__)
@@ -73,16 +79,64 @@ class UptimeResultProcessor(ResultProcessor[CheckResult, UptimeSubscription]):
def get_subscription_id(self, result: CheckResult) -> str:
return result["subscription_id"]
+ def check_and_update_regions(self, subscription: UptimeSubscription):
+ """
+ This method will check if regions have been added or removed from our region configuration,
+ and updates regions associated with this uptime monitor to reflect the new state. This is
+ done probabilistically, so that the check is performed roughly once an hour for each uptime
+ monitor.
+ """
+ # Run region checks and updates roughly once an hour
+ chance_to_run = subscription.interval_seconds / timedelta(hours=1).total_seconds()
+ if random.random() >= chance_to_run:
+ return
+
+ subscription_region_slugs = {r.region_slug for r in subscription.regions.all()}
+ active_region_slugs = {c.slug for c in get_active_region_configs()}
+ if subscription_region_slugs == active_region_slugs:
+ # Regions haven't changed, exit early.
+ return
+
+ new_region_slugs = active_region_slugs - subscription_region_slugs
+ removed_region_slugs = subscription_region_slugs - active_region_slugs
+ if new_region_slugs:
+ new_regions = [
+ UptimeSubscriptionRegion(uptime_subscription=subscription, region_slug=slug)
+ for slug in new_region_slugs
+ ]
+ UptimeSubscriptionRegion.objects.bulk_create(new_regions, ignore_conflicts=True)
+
+ if removed_region_slugs:
+ for deleted_region in UptimeSubscriptionRegion.objects.filter(
+ uptime_subscription=subscription, region_slug__in=removed_region_slugs
+ ):
+ if subscription.subscription_id:
+ # We need to explicitly send deletes here before we remove the region
+ send_uptime_config_deletion(
+ deleted_region.region_slug, subscription.subscription_id
+ )
+ deleted_region.delete()
+
+ # Regardless of whether we added or removed regions, we need to send an updated config to all active
+ # regions for this subscription so that they all get an update set of currently active regions.
+ subscription.update(status=UptimeSubscription.Status.UPDATING.value)
+ update_remote_uptime_subscription.delay(subscription.id)
+
def handle_result(self, subscription: UptimeSubscription | None, result: CheckResult):
logger.info("process_result", extra=result)
if subscription is None:
# If no subscription in the Postgres, this subscription has been orphaned. Remove
# from the checker
- send_uptime_config_deletion(result["subscription_id"])
+ # TODO: Send to region specifically from this check result once we update the schema
+ send_uptime_config_deletion(
+ get_active_region_configs()[0].slug, result["subscription_id"]
+ )
metrics.incr("uptime.result_processor.subscription_not_found", sample_rate=1.0)
return
+ self.check_and_update_regions(subscription)
+
project_subscriptions = list(subscription.projectuptimesubscription_set.all())
cluster = _get_cluster()
@@ -333,3 +387,7 @@ def has_reached_status_threshold(
class UptimeResultsStrategyFactory(ResultsStrategyFactory[CheckResult, UptimeSubscription]):
result_processor_cls = UptimeResultProcessor
topic_for_codec = Topic.UPTIME_RESULTS
+ identifier = "uptime"
+
+ def build_payload_grouping_key(self, result: CheckResult) -> str:
+ return self.result_processor.get_subscription_id(result)
diff --git a/src/sentry/uptime/detectors/tasks.py b/src/sentry/uptime/detectors/tasks.py
index 1e37a61d457997..836cae9b82e8a4 100644
--- a/src/sentry/uptime/detectors/tasks.py
+++ b/src/sentry/uptime/detectors/tasks.py
@@ -6,7 +6,7 @@
from django.utils import timezone
-from sentry import features
+from sentry import audit_log, features
from sentry.locks import locks
from sentry.models.organization import Organization
from sentry.models.project import Project
@@ -22,7 +22,7 @@
should_detect_for_organization,
should_detect_for_project,
)
-from sentry.uptime.models import ProjectUptimeSubscriptionMode
+from sentry.uptime.models import ProjectUptimeSubscription, ProjectUptimeSubscriptionMode
from sentry.uptime.subscriptions.subscriptions import (
delete_uptime_subscriptions_for_project,
get_auto_monitored_subscriptions_for_project,
@@ -30,6 +30,7 @@
is_url_auto_monitored_for_project,
)
from sentry.utils import metrics
+from sentry.utils.audit import create_system_audit_entry
from sentry.utils.hashlib import md5_text
from sentry.utils.locking import UnableToAcquireLock
@@ -221,16 +222,22 @@ def process_candidate_url(
)
if features.has("organizations:uptime-automatic-subscription-creation", project.organization):
# If we hit this point, then the url looks worth monitoring. Create an uptime subscription in monitor mode.
- monitor_url_for_project(project, url)
+ uptime_monitor = monitor_url_for_project(project, url)
# Disable auto-detection on this project and organization now that we've successfully found a hostname
project.update_option("sentry:uptime_autodetection", False)
project.organization.update_option("sentry:uptime_autodetection", False)
+ create_system_audit_entry(
+ organization=project.organization,
+ target_object=uptime_monitor.id,
+ event=audit_log.get_event_id("UPTIME_MONITOR_ADD"),
+ data=uptime_monitor.get_audit_log_data(),
+ )
metrics.incr("uptime.detectors.candidate_url.succeeded", sample_rate=1.0)
return True
-def monitor_url_for_project(project: Project, url: str):
+def monitor_url_for_project(project: Project, url: str) -> ProjectUptimeSubscription:
"""
Start monitoring a url for a project. Creates a subscription using our onboarding interval and links the project to
it. Also deletes any other auto-detected monitors since this one should replace them.
@@ -244,7 +251,8 @@ def monitor_url_for_project(project: Project, url: str):
ProjectUptimeSubscriptionMode.AUTO_DETECTED_ACTIVE,
],
)
- get_or_create_project_uptime_subscription(
+ metrics.incr("uptime.detectors.candidate_url.monitor_created", sample_rate=1.0)
+ return get_or_create_project_uptime_subscription(
project,
# TODO(epurkhiser): This is where we would put the environment object
# from autodetection if we decide to do that.
@@ -253,8 +261,7 @@ def monitor_url_for_project(project: Project, url: str):
interval_seconds=ONBOARDING_SUBSCRIPTION_INTERVAL_SECONDS,
timeout_ms=ONBOARDING_SUBSCRIPTION_TIMEOUT_MS,
mode=ProjectUptimeSubscriptionMode.AUTO_DETECTED_ONBOARDING,
- )
- metrics.incr("uptime.detectors.candidate_url.monitor_created", sample_rate=1.0)
+ )[0]
def is_failed_url(url: str) -> bool:
diff --git a/src/sentry/uptime/endpoints/organiation_uptime_alert_index.py b/src/sentry/uptime/endpoints/organiation_uptime_alert_index.py
new file mode 100644
index 00000000000000..3961bb0f93656a
--- /dev/null
+++ b/src/sentry/uptime/endpoints/organiation_uptime_alert_index.py
@@ -0,0 +1,120 @@
+from django.db.models import Q
+from drf_spectacular.utils import extend_schema
+from rest_framework.request import Request
+from rest_framework.response import Response
+
+from sentry.api.api_owners import ApiOwner
+from sentry.api.api_publish_status import ApiPublishStatus
+from sentry.api.base import region_silo_endpoint
+from sentry.api.bases import NoProjects
+from sentry.api.bases.organization import OrganizationEndpoint, OrganizationPermission
+from sentry.api.helpers.teams import get_teams
+from sentry.api.paginator import OffsetPaginator
+from sentry.api.serializers import serialize
+from sentry.apidocs.constants import RESPONSE_FORBIDDEN, RESPONSE_NOT_FOUND, RESPONSE_UNAUTHORIZED
+from sentry.apidocs.parameters import GlobalParams, OrganizationParams, UptimeParams
+from sentry.apidocs.utils import inline_sentry_response_serializer
+from sentry.db.models.query import in_iexact
+from sentry.models.organization import Organization
+from sentry.search.utils import tokenize_query
+from sentry.types.actor import Actor
+from sentry.uptime.endpoints.serializers import (
+ ProjectUptimeSubscriptionSerializer,
+ ProjectUptimeSubscriptionSerializerResponse,
+)
+from sentry.uptime.models import ProjectUptimeSubscription
+
+
+@region_silo_endpoint
+@extend_schema(tags=["Crons"])
+class OrganizationUptimeAlertIndexEndpoint(OrganizationEndpoint):
+ publish_status = {
+ "GET": ApiPublishStatus.EXPERIMENTAL,
+ }
+ owner = ApiOwner.CRONS
+ permission_classes = (OrganizationPermission,)
+
+ @extend_schema(
+        operation_id="Retrieve Uptime Alerts for an Organization",
+ parameters=[
+ GlobalParams.ORG_ID_OR_SLUG,
+ OrganizationParams.PROJECT,
+ GlobalParams.ENVIRONMENT,
+ UptimeParams.OWNER,
+ ],
+ responses={
+ 200: inline_sentry_response_serializer(
+ "UptimeAlertList", list[ProjectUptimeSubscriptionSerializerResponse]
+ ),
+ 401: RESPONSE_UNAUTHORIZED,
+ 403: RESPONSE_FORBIDDEN,
+ 404: RESPONSE_NOT_FOUND,
+ },
+ )
+ def get(self, request: Request, organization: Organization) -> Response:
+ """
+ Lists uptime alerts. May be filtered to a project or environment.
+ """
+ try:
+ filter_params = self.get_filter_params(request, organization, date_filter_optional=True)
+ except NoProjects:
+ return self.respond([])
+
+ queryset = ProjectUptimeSubscription.objects.filter(
+ project__organization_id=organization.id, project_id__in=filter_params["project_id"]
+ )
+ query = request.GET.get("query")
+ owners = request.GET.getlist("owner")
+
+ if "environment" in filter_params:
+ queryset = queryset.filter(environment__in=filter_params["environment_objects"])
+
+ if owners:
+ owners_set = set(owners)
+
+ # Remove special values from owners, this can't be parsed as an Actor
+ include_myteams = "myteams" in owners_set
+ owners_set.discard("myteams")
+ include_unassigned = "unassigned" in owners_set
+ owners_set.discard("unassigned")
+
+ actors = [Actor.from_identifier(identifier) for identifier in owners_set]
+
+ user_ids = [actor.id for actor in actors if actor.is_user]
+ team_ids = [actor.id for actor in actors if actor.is_team]
+
+ teams = get_teams(
+ request,
+ organization,
+ teams=[*team_ids, *(["myteams"] if include_myteams else [])],
+ )
+ team_ids = [team.id for team in teams]
+
+ owner_filter = Q(owner_user_id__in=user_ids) | Q(owner_team_id__in=team_ids)
+
+ if include_unassigned:
+ unassigned_filter = Q(owner_user_id=None) & Q(owner_team_id=None)
+ queryset = queryset.filter(unassigned_filter | owner_filter)
+ else:
+ queryset = queryset.filter(owner_filter)
+
+ if query:
+ tokens = tokenize_query(query)
+ for key, value in tokens.items():
+ if key == "query":
+ query_value = " ".join(value)
+ queryset = queryset.filter(
+ Q(name__icontains=query_value)
+ | Q(uptime_subscription__url__icontains=query_value)
+ )
+ elif key == "name":
+ queryset = queryset.filter(in_iexact("name", value))
+ else:
+ queryset = queryset.none()
+
+ return self.paginate(
+ request=request,
+ queryset=queryset,
+ on_results=lambda x: serialize(x, request.user, ProjectUptimeSubscriptionSerializer()),
+ paginator_cls=OffsetPaginator,
+ )
diff --git a/src/sentry/uptime/issue_platform.py b/src/sentry/uptime/issue_platform.py
index 9b72df0aa80dab..ef812b8330741e 100644
--- a/src/sentry/uptime/issue_platform.py
+++ b/src/sentry/uptime/issue_platform.py
@@ -45,7 +45,7 @@ def build_occurrence_from_result(
),
IssueEvidence(
name="Duration",
- value=str(result["duration_ms"]),
+            value=f'{result["duration_ms"]}ms',
important=False,
),
]
diff --git a/src/sentry/uptime/migrations/0019_uptime_region.py b/src/sentry/uptime/migrations/0019_uptime_region.py
new file mode 100644
index 00000000000000..5d0e9b64971321
--- /dev/null
+++ b/src/sentry/uptime/migrations/0019_uptime_region.py
@@ -0,0 +1,92 @@
+# Generated by Django 5.1.4 on 2024-12-17 23:47
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+import sentry.db.models.fields.bounded
+import sentry.db.models.fields.foreignkey
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("uptime", "0018_add_trace_sampling_field_to_uptime"),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name="Region",
+ fields=[
+ (
+ "id",
+ sentry.db.models.fields.bounded.BoundedBigAutoField(
+ primary_key=True, serialize=False
+ ),
+ ),
+ ("date_updated", models.DateTimeField(auto_now=True)),
+ ("date_added", models.DateTimeField(auto_now_add=True)),
+ ("slug", models.CharField(max_length=255, unique=True)),
+ ("name", models.CharField(max_length=255)),
+ ],
+ options={
+ "db_table": "uptime_region",
+ },
+ ),
+ migrations.CreateModel(
+ name="UptimeSubscriptionRegion",
+ fields=[
+ (
+ "id",
+ sentry.db.models.fields.bounded.BoundedBigAutoField(
+ primary_key=True, serialize=False
+ ),
+ ),
+ ("date_updated", models.DateTimeField(auto_now=True)),
+ ("date_added", models.DateTimeField(auto_now_add=True)),
+ (
+ "region",
+ sentry.db.models.fields.foreignkey.FlexibleForeignKey(
+ on_delete=django.db.models.deletion.CASCADE, to="uptime.region"
+ ),
+ ),
+ (
+ "uptime_subscription",
+ sentry.db.models.fields.foreignkey.FlexibleForeignKey(
+ on_delete=django.db.models.deletion.CASCADE, to="uptime.uptimesubscription"
+ ),
+ ),
+ ],
+ options={
+ "db_table": "uptime_uptimesubscriptionregion",
+ },
+ ),
+ migrations.AddField(
+ model_name="uptimesubscription",
+ name="regions",
+ field=models.ManyToManyField(
+ through="uptime.UptimeSubscriptionRegion", to="uptime.region"
+ ),
+ ),
+ migrations.AddConstraint(
+ model_name="uptimesubscriptionregion",
+ constraint=models.UniqueConstraint(
+ models.F("uptime_subscription"),
+ models.F("region"),
+ name="uptime_uptimesubscription_region_unique",
+ ),
+ ),
+ ]
diff --git a/src/sentry/uptime/migrations/0020_drop_region.py b/src/sentry/uptime/migrations/0020_drop_region.py
new file mode 100644
index 00000000000000..ead4c8e9c26479
--- /dev/null
+++ b/src/sentry/uptime/migrations/0020_drop_region.py
@@ -0,0 +1,84 @@
+# Generated by Django 5.1.4 on 2024-12-18 22:26
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+import sentry.db.models.fields.foreignkey
+from sentry.new_migrations.migrations import CheckedMigration
+from sentry.new_migrations.monkey.fields import SafeRemoveField
+from sentry.new_migrations.monkey.models import SafeDeleteModel
+from sentry.new_migrations.monkey.state import DeletionAction
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("uptime", "0019_uptime_region"),
+ ]
+
+ operations = [
+ migrations.SeparateDatabaseAndState(
+ state_operations=[
+ migrations.RemoveField(
+ model_name="uptimesubscription",
+ name="regions",
+ ),
+ ]
+ ),
+ migrations.AlterField(
+ model_name="uptimesubscriptionregion",
+ name="uptime_subscription",
+ field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="regions",
+ to="uptime.uptimesubscription",
+ ),
+ ),
+ migrations.RemoveConstraint(
+ model_name="uptimesubscriptionregion",
+ name="uptime_uptimesubscription_region_unique",
+ ),
+ migrations.AddField(
+ model_name="uptimesubscriptionregion",
+ name="region_slug",
+ field=models.CharField(db_index=True, default="", db_default="", max_length=255),
+ preserve_default=False,
+ ),
+ migrations.AlterField(
+ model_name="uptimesubscriptionregion",
+ name="region",
+ field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
+ null=True,
+ db_constraint=False,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="uptime.region",
+ ),
+ ),
+ migrations.AddConstraint(
+ model_name="uptimesubscriptionregion",
+ constraint=models.UniqueConstraint(
+ models.F("uptime_subscription"),
+ models.F("region_slug"),
+ name="uptime_uptimesubscription_region_slug_unique",
+ ),
+ ),
+ SafeRemoveField(
+ model_name="uptimesubscriptionregion",
+ name="region",
+ deletion_action=DeletionAction.MOVE_TO_PENDING,
+ ),
+ SafeDeleteModel(name="Region", deletion_action=DeletionAction.MOVE_TO_PENDING),
+ ]
diff --git a/src/sentry/uptime/migrations/0021_drop_region_table_col.py b/src/sentry/uptime/migrations/0021_drop_region_table_col.py
new file mode 100644
index 00000000000000..b12fd5f85aa529
--- /dev/null
+++ b/src/sentry/uptime/migrations/0021_drop_region_table_col.py
@@ -0,0 +1,35 @@
+# Generated by Django 5.1.4 on 2024-12-18 23:29
+
+from sentry.new_migrations.migrations import CheckedMigration
+from sentry.new_migrations.monkey.fields import SafeRemoveField
+from sentry.new_migrations.monkey.models import SafeDeleteModel
+from sentry.new_migrations.monkey.state import DeletionAction
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("uptime", "0020_drop_region"),
+ ]
+
+ operations = [
+ SafeRemoveField(
+ model_name="uptimesubscriptionregion",
+ name="region",
+ deletion_action=DeletionAction.DELETE,
+ ),
+ SafeDeleteModel(name="Region", deletion_action=DeletionAction.DELETE),
+ ]
diff --git a/src/sentry/uptime/models.py b/src/sentry/uptime/models.py
index 9f53ee68766cd1..77e4ca0cb745e8 100644
--- a/src/sentry/uptime/models.py
+++ b/src/sentry/uptime/models.py
@@ -7,9 +7,11 @@
from django.db.models import Q
from django.db.models.expressions import Value
from django.db.models.functions import MD5, Coalesce
+from sentry_kafka_schemas.schema_types.uptime_configs_v1 import REGIONSCHEDULEMODE_ROUND_ROBIN
from sentry.backup.scopes import RelocationScope
from sentry.db.models import (
+ DefaultFieldsModel,
DefaultFieldsModelExisting,
FlexibleForeignKey,
JSONField,
@@ -107,6 +109,26 @@ class Meta:
]
+@region_silo_model
+class UptimeSubscriptionRegion(DefaultFieldsModel):
+ __relocation_scope__ = RelocationScope.Excluded
+
+ uptime_subscription = FlexibleForeignKey("uptime.UptimeSubscription", related_name="regions")
+ region_slug = models.CharField(max_length=255, db_index=True, db_default="")
+
+ class Meta:
+ app_label = "uptime"
+ db_table = "uptime_uptimesubscriptionregion"
+
+ constraints = [
+ models.UniqueConstraint(
+ "uptime_subscription",
+ "region_slug",
+ name="uptime_uptimesubscription_region_slug_unique",
+ ),
+ ]
+
+
class ProjectUptimeSubscriptionMode(enum.IntEnum):
# Manually created by a user
MANUAL = 1
@@ -205,3 +227,7 @@ def get_active_auto_monitor_count_for_org(organization: Organization) -> int:
ProjectUptimeSubscriptionMode.AUTO_DETECTED_ACTIVE,
],
).count()
+
+
+class UptimeRegionScheduleMode(enum.StrEnum):
+ ROUND_ROBIN = REGIONSCHEDULEMODE_ROUND_ROBIN
diff --git a/src/sentry/uptime/subscriptions/regions.py b/src/sentry/uptime/subscriptions/regions.py
new file mode 100644
index 00000000000000..be954095a48d26
--- /dev/null
+++ b/src/sentry/uptime/subscriptions/regions.py
@@ -0,0 +1,15 @@
+from django.conf import settings
+
+from sentry.conf.types.uptime import UptimeRegionConfig
+
+
+def get_active_region_configs() -> list[UptimeRegionConfig]:
+ return [v for v in settings.UPTIME_REGIONS if v.enabled]
+
+
+def get_region_config(region_slug: str) -> UptimeRegionConfig | None:
+ region = next((r for r in settings.UPTIME_REGIONS if r.slug == region_slug), None)
+ if region is None:
+ # XXX: Temporary hack to guarantee we get a config
+ region = get_active_region_configs()[0]
+ return region
diff --git a/src/sentry/uptime/subscriptions/subscriptions.py b/src/sentry/uptime/subscriptions/subscriptions.py
index 2a0e78b7794819..0c3b58ee869648 100644
--- a/src/sentry/uptime/subscriptions/subscriptions.py
+++ b/src/sentry/uptime/subscriptions/subscriptions.py
@@ -15,9 +15,11 @@
ProjectUptimeSubscription,
ProjectUptimeSubscriptionMode,
UptimeSubscription,
+ UptimeSubscriptionRegion,
headers_json_encoder,
)
from sentry.uptime.rdap.tasks import fetch_subscription_rdap_info
+from sentry.uptime.subscriptions.regions import get_active_region_configs
from sentry.uptime.subscriptions.tasks import (
create_remote_uptime_subscription,
delete_remote_uptime_subscription,
@@ -132,6 +134,13 @@ def get_or_create_uptime_subscription(
subscription.update(status=UptimeSubscription.Status.CREATING.value)
created = True
+ # Associate active regions with this subscription
+ for region_config in get_active_region_configs():
+ # If we add a region here we need to resend the subscriptions
+ created |= UptimeSubscriptionRegion.objects.get_or_create(
+ uptime_subscription=subscription, region_slug=region_config.slug
+ )[1]
+
if created:
create_remote_uptime_subscription.delay(subscription.id)
fetch_subscription_rdap_info.delay(subscription.id)
diff --git a/src/sentry/uptime/subscriptions/tasks.py b/src/sentry/uptime/subscriptions/tasks.py
index c0936b840ddce4..d28948577f27b9 100644
--- a/src/sentry/uptime/subscriptions/tasks.py
+++ b/src/sentry/uptime/subscriptions/tasks.py
@@ -10,7 +10,8 @@
from sentry.snuba.models import QuerySubscription
from sentry.tasks.base import instrumented_task
from sentry.uptime.config_producer import produce_config, produce_config_removal
-from sentry.uptime.models import UptimeSubscription
+from sentry.uptime.models import UptimeRegionScheduleMode, UptimeSubscription
+from sentry.uptime.subscriptions.regions import get_active_region_configs
from sentry.utils import metrics
logger = logging.getLogger(__name__)
@@ -35,10 +36,50 @@ def create_remote_uptime_subscription(uptime_subscription_id, **kwargs):
metrics.incr("uptime.subscriptions.create.incorrect_status", sample_rate=1.0)
return
- subscription_id = send_uptime_subscription_config(subscription)
- # TODO: Ideally this should actually be `PENDING_FIRST_UPDATE` so we can validate it's really working as expected
+ region_slugs = [s.region_slug for s in subscription.regions.all()]
+ if not region_slugs:
+ # XXX: Hack to make sure that region configs are sent even if we don't have region rows present.
+ # Remove once everything is in place
+ region_slugs = [get_active_region_configs()[0].slug]
+
+ for region_slug in region_slugs:
+ send_uptime_subscription_config(region_slug, subscription)
subscription.update(
- status=QuerySubscription.Status.ACTIVE.value, subscription_id=subscription_id
+ status=QuerySubscription.Status.ACTIVE.value,
+ subscription_id=subscription.subscription_id,
+ )
+
+
+@instrumented_task(
+ name="sentry.uptime.subscriptions.tasks.update_remote_uptime_subscription",
+ queue="uptime",
+ default_retry_delay=5,
+ max_retries=5,
+)
+def update_remote_uptime_subscription(uptime_subscription_id, **kwargs):
+ """
+ Pushes details of an uptime subscription to uptime subscription regions.
+ """
+ try:
+ subscription = UptimeSubscription.objects.get(id=uptime_subscription_id)
+ except UptimeSubscription.DoesNotExist:
+ metrics.incr("uptime.subscriptions.update.subscription_does_not_exist", sample_rate=1.0)
+ return
+ if subscription.status != UptimeSubscription.Status.UPDATING.value:
+ metrics.incr("uptime.subscriptions.update.incorrect_status", sample_rate=1.0)
+ return
+
+ region_slugs = [s.region_slug for s in subscription.regions.all()]
+ if not region_slugs:
+ # XXX: Hack to make sure that region configs are sent even if we don't have region rows present.
+ # Remove once everything is in place
+ region_slugs = [get_active_region_configs()[0].slug]
+
+ for region_slug in region_slugs:
+ send_uptime_subscription_config(region_slug, subscription)
+ subscription.update(
+ status=QuerySubscription.Status.ACTIVE.value,
+ subscription_id=subscription.subscription_id,
)
@@ -62,6 +103,12 @@ def delete_remote_uptime_subscription(uptime_subscription_id, **kwargs):
metrics.incr("uptime.subscriptions.delete.incorrect_status", sample_rate=1.0)
return
+ region_slugs = [s.region_slug for s in subscription.regions.all()]
+ if not region_slugs:
+ # XXX: Hack to make sure that region configs are sent even if we don't have regions present.
+ # Remove once everything is in place
+ region_slugs = [get_active_region_configs()[0].slug]
+
subscription_id = subscription.subscription_id
if subscription.status == QuerySubscription.Status.DELETING.value:
subscription.delete()
@@ -69,15 +116,16 @@ def delete_remote_uptime_subscription(uptime_subscription_id, **kwargs):
subscription.update(subscription_id=None)
if subscription_id is not None:
- send_uptime_config_deletion(subscription_id)
+ for region_slug in region_slugs:
+ send_uptime_config_deletion(region_slug, subscription_id)
-def send_uptime_subscription_config(subscription: UptimeSubscription) -> str:
- # Whenever we create/update a config we always want to generate a new subscription id. This allows us to validate
- # that the config took effect
- subscription_id = uuid4().hex
- produce_config(uptime_subscription_to_check_config(subscription, subscription_id))
- return subscription_id
+def send_uptime_subscription_config(region_slug: str, subscription: UptimeSubscription):
+ if subscription.subscription_id is None:
+ subscription.subscription_id = uuid4().hex
+ produce_config(
+ region_slug, uptime_subscription_to_check_config(subscription, subscription.subscription_id)
+ )
def uptime_subscription_to_check_config(
@@ -96,14 +144,16 @@ def uptime_subscription_to_check_config(
"request_method": subscription.method,
"request_headers": headers,
"trace_sampling": subscription.trace_sampling,
+ "active_regions": [r.region_slug for r in subscription.regions.all()],
+ "region_schedule_mode": UptimeRegionScheduleMode.ROUND_ROBIN.value,
}
if subscription.body is not None:
config["request_body"] = subscription.body
return config
-def send_uptime_config_deletion(subscription_id: str) -> None:
- produce_config_removal(subscription_id)
+def send_uptime_config_deletion(destination_region_slug: str, subscription_id: str) -> None:
+ produce_config_removal(destination_region_slug, subscription_id)
@instrumented_task(
@@ -120,6 +170,7 @@ def subscription_checker(**kwargs):
for subscription in UptimeSubscription.objects.filter(
status__in=(
UptimeSubscription.Status.CREATING.value,
+ UptimeSubscription.Status.UPDATING.value,
UptimeSubscription.Status.DELETING.value,
),
date_updated__lt=timezone.now() - SUBSCRIPTION_STATUS_MAX_AGE,
diff --git a/src/sentry/users/api/endpoints/user_authenticator_enroll.py b/src/sentry/users/api/endpoints/user_authenticator_enroll.py
index e1fdfea5d73fd9..0fe89df27321f6 100644
--- a/src/sentry/users/api/endpoints/user_authenticator_enroll.py
+++ b/src/sentry/users/api/endpoints/user_authenticator_enroll.py
@@ -13,7 +13,7 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.decorators import email_verification_required, sudo_required
+from sentry.api.decorators import primary_email_verification_required, sudo_required
from sentry.api.invite_helper import ApiInviteHelper, remove_invite_details_from_session
from sentry.api.serializers import serialize
from sentry.auth.authenticators.base import EnrollmentStatus, NewEnrollmentDisallowed
@@ -175,7 +175,7 @@ def get(self, request: Request, user: User, interface_id: str) -> HttpResponse:
return Response(response)
@sudo_required
- @email_verification_required
+ @primary_email_verification_required
def post(self, request: Request, user: User, interface_id: str) -> HttpResponse:
"""
Enroll in authenticator interface
diff --git a/src/sentry/users/api/endpoints/user_password.py b/src/sentry/users/api/endpoints/user_password.py
index a3a67682131c36..fbf5a9d29cd54b 100644
--- a/src/sentry/users/api/endpoints/user_password.py
+++ b/src/sentry/users/api/endpoints/user_password.py
@@ -13,6 +13,7 @@
from sentry.types.ratelimit import RateLimit, RateLimitCategory
from sentry.users.api.bases.user import UserEndpoint
from sentry.users.models.user import User
+from sentry.web.frontend.twofactor import reset_2fa_rate_limits
class UserPasswordSerializer(serializers.Serializer[User]):
@@ -89,4 +90,7 @@ def put(self, request: Request, user: User) -> Response:
ip_address=request.META["REMOTE_ADDR"],
send_email=True,
)
+
+ reset_2fa_rate_limits(user.id)
+
return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/src/sentry/users/models/user.py b/src/sentry/users/models/user.py
index a84b99c4cd93a0..736f96d0fa50a8 100644
--- a/src/sentry/users/models/user.py
+++ b/src/sentry/users/models/user.py
@@ -244,6 +244,9 @@ def has_verified_emails(self) -> bool:
def has_unverified_emails(self) -> bool:
return self.get_unverified_emails().exists()
+ def has_verified_primary_email(self) -> bool:
+ return self.emails.filter(is_verified=True, email=self.email).exists()
+
def has_usable_password(self) -> bool:
if self.password == "" or self.password is None:
# This is the behavior we've been relying on from Django 1.6 - 2.0.
diff --git a/src/sentry/users/services/user/model.py b/src/sentry/users/services/user/model.py
index b2c4a23d403264..f4d5082c63d6ed 100644
--- a/src/sentry/users/services/user/model.py
+++ b/src/sentry/users/services/user/model.py
@@ -87,6 +87,9 @@ def has_unverified_emails(self) -> bool:
def has_verified_emails(self) -> bool:
return len(self.get_verified_emails()) > 0
+ def has_verified_primary_email(self) -> bool:
+ return bool([e for e in self.useremails if e.is_verified and e.email == self.email])
+
def get_unverified_emails(self) -> list[RpcUserEmail]:
return [e for e in self.useremails if not e.is_verified]
diff --git a/src/sentry/users/web/accounts.py b/src/sentry/users/web/accounts.py
index 24619470f4f708..0b669ee44f869f 100644
--- a/src/sentry/users/web/accounts.py
+++ b/src/sentry/users/web/accounts.py
@@ -28,6 +28,7 @@
)
from sentry.utils import auth
from sentry.web.decorators import login_required, set_referrer_policy
+from sentry.web.frontend.twofactor import reset_2fa_rate_limits
from sentry.web.helpers import render_to_response
logger = logging.getLogger("sentry.accounts")
@@ -273,6 +274,8 @@ def recover_confirm(
send_email=True,
)
+ reset_2fa_rate_limits(user.id)
+
return login_redirect(request)
else:
form = form_cls(user=user)
diff --git a/src/sentry/utils/batching_kafka_consumer.py b/src/sentry/utils/batching_kafka_consumer.py
index eb4e4b7c154c5f..47f0530e32b42c 100644
--- a/src/sentry/utils/batching_kafka_consumer.py
+++ b/src/sentry/utils/batching_kafka_consumer.py
@@ -8,9 +8,6 @@
logger = logging.getLogger("sentry.batching-kafka-consumer")
-DEFAULT_QUEUED_MAX_MESSAGE_KBYTES = 50000
-DEFAULT_QUEUED_MIN_MESSAGES = 10000
-
def wait_for_topics(admin_client: AdminClient, topics: list[str], timeout: int = 10) -> None:
"""
diff --git a/src/sentry/utils/memory.py b/src/sentry/utils/memory.py
new file mode 100644
index 00000000000000..a1fb5809b1432a
--- /dev/null
+++ b/src/sentry/utils/memory.py
@@ -0,0 +1,17 @@
+import resource
+from contextlib import contextmanager
+
+from sentry.utils import metrics
+
+
+def get_rss_usage():
+ return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
+
+
+@contextmanager
+def track_memory_usage(metric, **kwargs):
+ before = get_rss_usage()
+ try:
+ yield
+ finally:
+ metrics.distribution(metric, get_rss_usage() - before, unit="byte", **kwargs)
diff --git a/src/sentry/utils/performance_issues/base.py b/src/sentry/utils/performance_issues/base.py
index fb4e7812ee4328..e9063c98408628 100644
--- a/src/sentry/utils/performance_issues/base.py
+++ b/src/sentry/utils/performance_issues/base.py
@@ -240,8 +240,6 @@ def total_span_time(span_list: list[dict[str, Any]]) -> float:
return total_duration * 1000
-PARAMETERIZED_SQL_QUERY_REGEX = re.compile(r"\?|\$1|%s")
-
PARAMETERIZED_URL_REGEX = re.compile(
r"""(?x)
(?P
diff --git a/src/sentry/utils/platform_categories.py b/src/sentry/utils/platform_categories.py
index 25c3e65b992809..3baeb173bbb51f 100644
--- a/src/sentry/utils/platform_categories.py
+++ b/src/sentry/utils/platform_categories.py
@@ -25,6 +25,7 @@
# Mirrors `const mobile` in sentry/static/app/data/platformCategories.tsx
# When changing this file, make sure to keep sentry/static/app/data/platformCategories.tsx in sync.
+
MOBILE = {
"android",
"apple-ios",
@@ -165,3 +166,12 @@
{id: "serverless", "name": _("Serverless"), "platforms": SERVERLESS},
{id: "temporary", "name": _("Temporary"), "platforms": TEMPORARY},
]
+
+# Mirrors `const sourceMaps` in sentry/static/app/data/platformCategories.tsx
+# When changing this file, make sure to keep sentry/static/app/data/platformCategories.tsx in sync.
+SOURCE_MAPS = {
+ *FRONTEND,
+ "react-native",
+ "cordova",
+ "electron",
+}
diff --git a/src/sentry/utils/registry.py b/src/sentry/utils/registry.py
index 0ff110e268e054..3eff6865e82051 100644
--- a/src/sentry/utils/registry.py
+++ b/src/sentry/utils/registry.py
@@ -15,9 +15,15 @@ class NoRegistrationExistsError(ValueError):
class Registry(Generic[T]):
- def __init__(self):
+ """
+ A simple generic registry that allows for registering and retrieving items by key. Reverse lookup by value is enabled by default.
+ If you have duplicate values, you may want to disable reverse lookup.
+ """
+
+ def __init__(self, enable_reverse_lookup=True):
self.registrations: dict[str, T] = {}
self.reverse_lookup: dict[T, str] = {}
+ self.enable_reverse_lookup = enable_reverse_lookup
def register(self, key: str):
def inner(item: T) -> T:
@@ -26,13 +32,14 @@ def inner(item: T) -> T:
f"A registration already exists for {key}: {self.registrations[key]}"
)
- if item in self.reverse_lookup:
- raise AlreadyRegisteredError(
- f"A registration already exists for {item}: {self.reverse_lookup[item]}"
- )
+ if self.enable_reverse_lookup:
+ if item in self.reverse_lookup:
+ raise AlreadyRegisteredError(
+ f"A registration already exists for {item}: {self.reverse_lookup[item]}"
+ )
+ self.reverse_lookup[item] = key
self.registrations[key] = item
- self.reverse_lookup[item] = key
return item
@@ -44,6 +51,8 @@ def get(self, key: str) -> T:
return self.registrations[key]
def get_key(self, item: T) -> str:
+ if not self.enable_reverse_lookup:
+ raise NotImplementedError("Reverse lookup is not enabled")
if item not in self.reverse_lookup:
raise NoRegistrationExistsError(f"No registration exists for {item}")
return self.reverse_lookup[item]
diff --git a/src/sentry/utils/samples.py b/src/sentry/utils/samples.py
index 00ce2b9158651f..f3ccda8e798758 100644
--- a/src/sentry/utils/samples.py
+++ b/src/sentry/utils/samples.py
@@ -114,7 +114,6 @@ def load_data(
trace_context=None,
fingerprint=None,
event_id=None,
- metrics_summary=None,
):
# NOTE: Before editing this data, make sure you understand the context
# in which its being used. It is NOT only used for local development and
@@ -193,9 +192,6 @@ def load_data(
start_timestamp = start_timestamp.replace(tzinfo=timezone.utc)
data["start_timestamp"] = start_timestamp.timestamp()
- if metrics_summary is not None:
- data["_metrics_summary"] = metrics_summary
-
if trace is None:
trace = uuid4().hex
if span_id is None:
@@ -411,15 +407,6 @@ def create_sample_event(
spans,
)
- if not data:
- logger.info(
- "create_sample_event: no data loaded",
- extra={
- "project_id": project.id,
- "sample_event": True,
- },
- )
- return
for key in ["parent_span_id", "hash", "exclusive_time"]:
if key in kwargs:
data["contexts"]["trace"][key] = kwargs.pop(key)
diff --git a/src/sentry/utils/sdk_crashes/sdk_crash_detection_config.py b/src/sentry/utils/sdk_crashes/sdk_crash_detection_config.py
index 0053f191a524c8..22c592ce31e73a 100644
--- a/src/sentry/utils/sdk_crashes/sdk_crash_detection_config.py
+++ b/src/sentry/utils/sdk_crashes/sdk_crash_detection_config.py
@@ -57,6 +57,10 @@ class SDKCrashDetectionConfig:
"""Whether to report fatal errors. If true, both unhandled and fatal errors are reported.
If false, only unhandled errors are reported."""
report_fatal_errors: bool
+ """The mechanism types to ignore. For example, {"console", "unhandledrejection"}. If empty, all mechanism types are captured."""
+ ignore_mechanism_type: set[str]
+ """The mechanism types to capture. For example, {"ANR", "AppExitInfo"}. Useful when you want to detect events that are neither unhandled nor fatal."""
+ allow_mechanism_type: set[str]
"""The system library path patterns to detect system frames. For example, `System/Library/*` """
system_library_path_patterns: set[str]
"""The configuration for detecting SDK frames."""
@@ -100,6 +104,8 @@ def build_sdk_crash_detection_configs() -> Sequence[SDKCrashDetectionConfig]:
"sentry.cocoa.unreal": cocoa_min_sdk_version,
},
report_fatal_errors=False,
+ ignore_mechanism_type=set(),
+ allow_mechanism_type=set(),
system_library_path_patterns={r"/System/Library/**", r"/usr/lib/**"},
sdk_frame_config=SDKFrameConfig(
function_patterns={
@@ -132,6 +138,10 @@ def build_sdk_crash_detection_configs() -> Sequence[SDKCrashDetectionConfig]:
"sentry.javascript.react-native": "4.0.0",
},
report_fatal_errors=False,
+ # used by the JS/RN SDKs
+ # https://github.com/getsentry/sentry-javascript/blob/dafd51054d8b2ab2030fa0b16ad0fd70493b6e08/packages/core/src/integrations/captureconsole.ts#L60
+ ignore_mechanism_type={"console"},
+ allow_mechanism_type=set(),
system_library_path_patterns={
r"**/react-native/Libraries/**",
r"**/react-native-community/**",
@@ -204,6 +214,8 @@ def build_sdk_crash_detection_configs() -> Sequence[SDKCrashDetectionConfig]:
"sentry.native.android": native_min_sdk_version,
},
report_fatal_errors=False,
+ ignore_mechanism_type=set(),
+ allow_mechanism_type={"ANR", "AppExitInfo"},
system_library_path_patterns={
r"java.**",
r"javax.**",
@@ -227,7 +239,7 @@ def build_sdk_crash_detection_configs() -> Sequence[SDKCrashDetectionConfig]:
function_and_path_patterns=[
FunctionAndPathPattern(
function_pattern=r"*pthread_getcpuclockid*",
- path_pattern=r"/apex/com.android.art/lib64/bionic/libc.so",
+ path_pattern=r"/apex/com.android.runtime/lib64/bionic/libc.so",
),
FunctionAndPathPattern(
function_pattern=r"*art::Trace::StopTracing*",
@@ -265,6 +277,8 @@ def build_sdk_crash_detection_configs() -> Sequence[SDKCrashDetectionConfig]:
"sentry.native.unreal": native_min_sdk_version,
},
report_fatal_errors=False,
+ ignore_mechanism_type=set(),
+ allow_mechanism_type=set(),
system_library_path_patterns={
# well known locations for unix paths
r"/lib/**",
@@ -315,6 +329,8 @@ def build_sdk_crash_detection_configs() -> Sequence[SDKCrashDetectionConfig]:
"sentry.dart.flutter": dart_min_sdk_version,
},
report_fatal_errors=True,
+ ignore_mechanism_type=set(),
+ allow_mechanism_type=set(),
system_library_path_patterns={
# Dart
r"org-dartlang-sdk:///**",
diff --git a/src/sentry/utils/sdk_crashes/sdk_crash_detector.py b/src/sentry/utils/sdk_crashes/sdk_crash_detector.py
index 83f9d9834c7636..d5130d8140b4a7 100644
--- a/src/sentry/utils/sdk_crashes/sdk_crash_detector.py
+++ b/src/sentry/utils/sdk_crashes/sdk_crash_detector.py
@@ -49,6 +49,13 @@ def should_detect_sdk_crash(
if not self.is_sdk_supported(sdk_name, sdk_version):
return False
+ mechanism_type = get_path(event_data, "exception", "values", -1, "mechanism", "type")
+ if mechanism_type and mechanism_type in self.config.ignore_mechanism_type:
+ return False
+
+ if mechanism_type and mechanism_type in self.config.allow_mechanism_type:
+ return True
+
is_unhandled = (
get_path(event_data, "exception", "values", -1, "mechanism", "handled") is False
)
diff --git a/src/sentry/utils/snuba_rpc.py b/src/sentry/utils/snuba_rpc.py
index afdb03b9b2e031..5a75c188150715 100644
--- a/src/sentry/utils/snuba_rpc.py
+++ b/src/sentry/utils/snuba_rpc.py
@@ -146,6 +146,7 @@ def _make_rpc_request(
with sentry_sdk.start_span(op="snuba_rpc.run", name=req.__class__.__name__) as span:
if referrer:
span.set_tag("snuba.referrer", referrer)
+ span.set_data("snuba.query", req)
http_resp = _snuba_pool.urlopen(
"POST",
f"/rpc/{endpoint_name}/{class_version}",
diff --git a/src/sentry/utils/strings.py b/src/sentry/utils/strings.py
index 79a797e9a87c6a..b9da4f573b7b8d 100644
--- a/src/sentry/utils/strings.py
+++ b/src/sentry/utils/strings.py
@@ -9,8 +9,6 @@
from collections.abc import Callable
from typing import overload
-from django.utils.encoding import smart_str
-
_sprintf_placeholder_re = re.compile(
r"%(?:\d+\$)?[+-]?(?:[ 0]|\'.{1})?-?\d*(?:\.\d+)?[bcdeEufFgGosxX]"
)
@@ -83,7 +81,7 @@ def decompress(value: str) -> bytes:
def strip(value: str | None) -> str:
if not value:
return ""
- return smart_str(value).strip()
+ return value.strip()
def soft_hyphenate(value: str, length: int, hyphen: str = "\u00ad") -> str:
diff --git a/src/sentry/utils/tag_normalization.py b/src/sentry/utils/tag_normalization.py
index b68776c1ff5f3e..bce6efa1b9047d 100644
--- a/src/sentry/utils/tag_normalization.py
+++ b/src/sentry/utils/tag_normalization.py
@@ -92,7 +92,7 @@ def normalize_sdk_tag(tag: str) -> str:
# collapse tags other than JavaScript / Native to their top-level SDK
- if not tag.split(".")[1] in {"javascript", "native"}:
+ if tag.split(".")[1] not in {"javascript", "native"}:
tag = ".".join(tag.split(".", 2)[0:2])
if tag.split(".")[1] == "native":
@@ -115,7 +115,7 @@ def normalized_sdk_tag_from_event(data: Mapping[str, Any]) -> str:
Note: Some platforms may keep their framework-specific values, as needed for analytics.
This is done to reduce the cardinality of the `sdk.name` tag, while keeping
- the ones interesinting to us as granual as possible.
+ the ones interesting to us as granular as possible.
"""
try:
return normalize_sdk_tag((data.get("sdk") or {}).get("name") or "other")
diff --git a/src/sentry/utils/types.py b/src/sentry/utils/types.py
index c0fdd43bf663b4..b4ed4aa90fa0bf 100644
--- a/src/sentry/utils/types.py
+++ b/src/sentry/utils/types.py
@@ -215,9 +215,3 @@ def type_from_value(value):
AnyCallable = typing.Callable[..., AnyType]
-
-
-def NonNone(value: T | None) -> T:
- """A hacked version of TS's non-null assertion operator"""
- assert value is not None
- return value
diff --git a/src/sentry/web/frontend/auth_login.py b/src/sentry/web/frontend/auth_login.py
index 4fb8f4eb2fdbde..cf19698091d73c 100644
--- a/src/sentry/web/frontend/auth_login.py
+++ b/src/sentry/web/frontend/auth_login.py
@@ -40,6 +40,7 @@
from sentry.utils.http import absolute_uri
from sentry.utils.sdk import capture_exception
from sentry.utils.urls import add_params_to_url
+from sentry.web.client_config import get_client_config
from sentry.web.forms.accounts import AuthenticationForm, RegistrationForm
from sentry.web.frontend.base import BaseView, control_silo_view
@@ -431,7 +432,7 @@ def get_ratelimited_login_form(
]
metrics.incr("login.attempt", instance="rate_limited", skip_internal=True, sample_rate=1.0)
- context = {
+ context = self.get_default_context(request=request) | {
"op": "login",
"login_form": login_form,
"referrer": request.GET.get("referrer"),
@@ -526,11 +527,10 @@ def get_default_context(self, request: Request, **kwargs) -> dict:
default_context = {
"server_hostname": get_server_hostname(),
"login_form": None,
- "organization": kwargs.pop(
- "organization", None
- ), # NOTE: not utilized in basic login page (only org login)
+ "organization": organization, # NOTE: not utilized in basic login page (only org login)
"register_form": None,
"CAN_REGISTER": False,
+ "react_config": get_client_config(request, self.active_organization),
"join_request_link": self.get_join_request_link(
organization=organization, request=request
), # NOTE: not utilized in basic login page (only org login)
@@ -702,18 +702,11 @@ def handle_basic_auth(self, request: Request, **kwargs) -> HttpResponseBase:
"login.attempt", instance="failure", skip_internal=True, sample_rate=1.0
)
- context = {
+ context = self.get_default_context(request=request, organization=organization) | {
"op": op or "login",
- "server_hostname": get_server_hostname(),
"login_form": login_form,
- "organization": organization,
"register_form": register_form,
"CAN_REGISTER": can_register,
- "join_request_link": self.get_join_request_link(
- organization=organization, request=request
- ),
- "show_login_banner": settings.SHOW_LOGIN_BANNER,
- "referrer": request.GET.get("referrer"),
}
context.update(additional_context.run_callbacks(request))
diff --git a/src/sentry/web/frontend/auth_organization_login.py b/src/sentry/web/frontend/auth_organization_login.py
index 3e2de690770ea6..18af32980bcdc5 100644
--- a/src/sentry/web/frontend/auth_organization_login.py
+++ b/src/sentry/web/frontend/auth_organization_login.py
@@ -23,14 +23,15 @@ def respond_login(self, request: Request, context, *args, **kwargs) -> HttpRespo
return self.respond("sentry/organization-login.html", context)
def handle_sso(self, request: Request, organization: RpcOrganization, auth_provider):
- referrer = request.GET.get("referrer")
if request.method == "POST":
helper = AuthHelper(
request=request,
organization=organization,
auth_provider=auth_provider,
flow=AuthHelper.FLOW_LOGIN,
- referrer=referrer, # TODO: get referrer from the form submit - not the query parms
+ referrer=request.GET.get(
+ "referrer"
+ ), # TODO: get referrer from the form submit - not the query params
)
if request.POST.get("init"):
@@ -47,13 +48,10 @@ def handle_sso(self, request: Request, organization: RpcOrganization, auth_provi
provider = auth_provider.get_provider()
- context = {
- "CAN_REGISTER": False,
- "organization": organization,
+ context = self.get_default_context(request, organization=organization) | {
"provider_key": provider.key,
"provider_name": provider.name,
"authenticated": request.user.is_authenticated,
- "referrer": referrer,
}
return self.respond("sentry/organization-login.html", context)
diff --git a/src/sentry/web/frontend/oauth_authorize.py b/src/sentry/web/frontend/oauth_authorize.py
index e9ed3e3543fc99..163b2f973440c6 100644
--- a/src/sentry/web/frontend/oauth_authorize.py
+++ b/src/sentry/web/frontend/oauth_authorize.py
@@ -233,13 +233,14 @@ def get(self, request: HttpRequest, **kwargs) -> HttpResponseBase:
# If application is not org level we should not show organizations to choose from at all
organization_options = []
- context = {
+ context = self.get_default_context(request) | {
"user": request.user,
"application": application,
"scopes": scopes,
"permissions": permissions,
"organization_options": organization_options,
}
+
return self.respond("sentry/oauth-authorize.html", context)
def post(self, request: HttpRequest, **kwargs) -> HttpResponseBase:
diff --git a/src/sentry/web/frontend/react_page.py b/src/sentry/web/frontend/react_page.py
index 81e5c4d26b8051..68e65ed8d01463 100644
--- a/src/sentry/web/frontend/react_page.py
+++ b/src/sentry/web/frontend/react_page.py
@@ -88,6 +88,14 @@ def dns_prefetch(self) -> list[str]:
def handle_react(self, request: Request, **kwargs) -> HttpResponse:
org_context = getattr(self, "active_organization", None)
+ react_config = get_client_config(request, org_context)
+
+ user_theme = ""
+ if react_config.get("user", None) and react_config["user"].get("options", {}).get(
+ "theme", None
+ ):
+ user_theme = f"theme-{react_config['user']['options']['theme']}"
+
context = {
"CSRF_COOKIE_NAME": settings.CSRF_COOKIE_NAME,
"meta_tags": [
@@ -100,7 +108,8 @@ def handle_react(self, request: Request, **kwargs) -> HttpResponse:
# Since we already have it here from the OrganizationMixin, we can
# save some work and render it faster.
"org_context": org_context,
- "react_config": get_client_config(request, org_context),
+ "react_config": react_config,
+ "user_theme": user_theme,
}
# Force a new CSRF token to be generated and set in user's
diff --git a/src/sentry/web/frontend/twofactor.py b/src/sentry/web/frontend/twofactor.py
index 5018e624860b80..a07c041b988ca8 100644
--- a/src/sentry/web/frontend/twofactor.py
+++ b/src/sentry/web/frontend/twofactor.py
@@ -1,6 +1,7 @@
import logging
import time
from base64 import b64encode
+from urllib.parse import urlencode
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
from django.urls import reverse
@@ -17,6 +18,7 @@
from sentry.utils.email import MessageBuilder
from sentry.utils.geo import geo_by_addr
from sentry.utils.http import absolute_uri
+from sentry.web.client_config import get_client_config
from sentry.web.forms.accounts import TwoFactorForm
from sentry.web.frontend.base import BaseView, control_silo_view
from sentry.web.helpers import render_to_response
@@ -26,6 +28,36 @@
logger = logging.getLogger(__name__)
+MFA_RATE_LIMITS = {
+ "auth-2fa:user:{user_id}": {
+ "limit": 5,
+ "window": 20,
+ },
+ "auth-2fa-long:user:{user_id}": {
+ "limit": 20,
+ "window": 60 * 60,
+ },
+}
+
+
+def is_rate_limited(user_id: int) -> bool:
+ result = False
+ for key_template, rl in MFA_RATE_LIMITS.items():
+ result = result or ratelimiter.backend.is_limited(
+ key_template.format(user_id=user_id),
+ limit=rl["limit"],
+ window=rl["window"],
+ )
+ return result
+
+
+def reset_2fa_rate_limits(user_id: int):
+ for key_template, rl in MFA_RATE_LIMITS.items():
+ ratelimiter.backend.reset(
+ key_template.format(user_id=user_id),
+ window=rl["window"],
+ )
+
@control_silo_view
class TwoFactorAuthView(BaseView):
@@ -112,12 +144,16 @@ def validate_otp(self, otp, selected_interface, all_interfaces=None):
return interface
def send_notification_email(self, email, ip_address):
+ recover_uri = "{path}?{query}".format(
+ path=reverse("sentry-account-recover"), query=urlencode({"email": email})
+ )
context = {
"datetime": timezone.now(),
"email": email,
"geo": geo_by_addr(ip_address),
"ip_address": ip_address,
"url": absolute_uri(reverse("sentry-account-settings-security")),
+ "recover_url": absolute_uri(recover_uri),
}
subject = "Suspicious Activity Detected"
@@ -146,13 +182,7 @@ def handle(self, request: HttpRequest) -> HttpResponse:
challenge = activation = None
interface = self.negotiate_interface(request, interfaces)
- is_rate_limited = ratelimiter.backend.is_limited(
- f"auth-2fa:user:{user.id}", limit=5, window=20
- ) or ratelimiter.backend.is_limited(
- f"auth-2fa-long:user:{user.id}", limit=20, window=60 * 60
- )
-
- if request.method == "POST" and is_rate_limited:
+ if request.method == "POST" and is_rate_limited(user.id):
# prevent spamming due to failed 2FA attempts
if not ratelimiter.backend.is_limited(
f"auth-2fa-failed-notification:user:{user.id}", limit=1, window=30 * 60
@@ -220,6 +250,7 @@ def handle(self, request: HttpRequest) -> HttpResponse:
"interface": interface,
"other_interfaces": self.get_other_interfaces(interface, interfaces),
"activation": activation,
+ "react_config": get_client_config(request, self.active_organization),
},
request,
status=200,
diff --git a/src/sentry/web/urls.py b/src/sentry/web/urls.py
index dd5361143698c0..69cbedc6987a02 100644
--- a/src/sentry/web/urls.py
+++ b/src/sentry/web/urls.py
@@ -623,6 +623,11 @@
react_page_view,
name="sentry-customer-domain-audit-log-settings",
),
+ re_path(
+ r"^rate-limits/",
+ react_page_view,
+ name="sentry-customer-domain-rate-limits-settings",
+ ),
re_path(
r"^relay/",
react_page_view,
@@ -638,6 +643,16 @@
react_page_view,
name="sentry-customer-domain-integrations-settings",
),
+ re_path(
+ r"^dynamic-sampling/",
+ react_page_view,
+ name="sentry-customer-domain-dynamic-sampling-settings",
+ ),
+ re_path(
+ r"^feature-flags/",
+ react_page_view,
+ name="sentry-customer-domain-feature-flags-settings",
+ ),
re_path(
r"^developer-settings/",
react_page_view,
@@ -678,11 +693,6 @@
react_page_view,
name="sentry-customer-domain-legal-settings",
),
- re_path(
- r"^dynamic-sampling/",
- react_page_view,
- name="sentry-customer-domain-dynamic-sampling-settings",
- ),
re_path(
r"^(?P<organization_slug>[\w_-]+)/$",
react_page_view,
diff --git a/src/sentry/workflow_engine/endpoints/serializers.py b/src/sentry/workflow_engine/endpoints/serializers.py
index af736bb054be54..e3975982f4e8fa 100644
--- a/src/sentry/workflow_engine/endpoints/serializers.py
+++ b/src/sentry/workflow_engine/endpoints/serializers.py
@@ -58,7 +58,7 @@ class DataConditionSerializer(Serializer):
def serialize(self, obj: DataCondition, *args, **kwargs) -> dict[str, Any]:
return {
"id": str(obj.id),
- "condition": obj.condition,
+ "condition": obj.type,
"comparison": obj.comparison,
"result": obj.condition_result,
}
diff --git a/src/sentry/workflow_engine/endpoints/validators.py b/src/sentry/workflow_engine/endpoints/validators.py
index 79312091bb79be..4a1829a7f971c6 100644
--- a/src/sentry/workflow_engine/endpoints/validators.py
+++ b/src/sentry/workflow_engine/endpoints/validators.py
@@ -37,7 +37,7 @@ def create(self) -> T:
class BaseDataConditionValidator(CamelSnakeSerializer):
- condition = serializers.CharField(
+ type = serializers.CharField(
required=True,
max_length=200,
help_text="Condition used to compare data value to the stored comparison value",
@@ -51,14 +51,8 @@ def comparison(self) -> Field:
def result(self) -> Field:
raise NotImplementedError
- @property
- def type(self) -> str:
- # TODO: This should probably at least be an enum
- raise NotImplementedError
-
def validate(self, attrs):
attrs = super().validate(attrs)
- attrs["type"] = self.type
return attrs
@@ -84,15 +78,15 @@ def supported_conditions(self) -> frozenset[Condition]:
def supported_results(self) -> frozenset[DetectorPriorityLevel]:
raise NotImplementedError
- def validate_condition(self, value: str) -> Condition:
+ def validate_type(self, value: str) -> Condition:
try:
- condition = Condition(value)
+ type = Condition(value)
except ValueError:
- condition = None
+ type = None
- if condition not in self.supported_conditions:
- raise serializers.ValidationError(f"Unsupported condition {value}")
- return condition
+ if type not in self.supported_conditions:
+ raise serializers.ValidationError(f"Unsupported type {value}")
+ return type
def validate_result(self, value: str) -> DetectorPriorityLevel:
try:
@@ -160,7 +154,6 @@ def create(self, validated_data):
)
for condition in validated_data["data_conditions"]:
DataCondition.objects.create(
- condition=condition["condition"],
comparison=condition["comparison"],
condition_result=condition["result"],
type=condition["type"],
diff --git a/src/sentry/workflow_engine/handlers/__init__.py b/src/sentry/workflow_engine/handlers/__init__.py
index 4fc3428a0ce1e4..50363cc9ce97a4 100644
--- a/src/sentry/workflow_engine/handlers/__init__.py
+++ b/src/sentry/workflow_engine/handlers/__init__.py
@@ -1,5 +1,9 @@
# Export any handlers we want to include into the registry
-__all__ = ["NotificationActionHandler", "GroupEventConditionHandler"]
+__all__ = [
+ "NotificationActionHandler",
+ "EventCreatedByDetectorConditionHandler",
+ "EventSeenCountConditionHandler",
+]
from .action import NotificationActionHandler
-from .condition import GroupEventConditionHandler
+from .condition import EventCreatedByDetectorConditionHandler, EventSeenCountConditionHandler
diff --git a/src/sentry/workflow_engine/handlers/action/notification.py b/src/sentry/workflow_engine/handlers/action/notification.py
index 97e8cf84f39818..91b8bfcc96e718 100644
--- a/src/sentry/workflow_engine/handlers/action/notification.py
+++ b/src/sentry/workflow_engine/handlers/action/notification.py
@@ -1,14 +1,15 @@
-from sentry.eventstore.models import GroupEvent
from sentry.workflow_engine.models import Action, Detector
from sentry.workflow_engine.registry import action_handler_registry
-from sentry.workflow_engine.types import ActionHandler
+from sentry.workflow_engine.types import ActionHandler, WorkflowJob
-@action_handler_registry.register(Action.Type.NOTIFICATION)
+# TODO - Enable once the PR to allow for multiple of the same funcs is merged
+# @action_handler_registry.register(Action.Type.PAGERDUTY)
+@action_handler_registry.register(Action.Type.SLACK)
class NotificationActionHandler(ActionHandler):
@staticmethod
def execute(
- evt: GroupEvent,
+ job: WorkflowJob,
action: Action,
detector: Detector,
) -> None:
diff --git a/src/sentry/workflow_engine/handlers/condition/__init__.py b/src/sentry/workflow_engine/handlers/condition/__init__.py
index 85a4596d38b75e..cc0ca9879d61bb 100644
--- a/src/sentry/workflow_engine/handlers/condition/__init__.py
+++ b/src/sentry/workflow_engine/handlers/condition/__init__.py
@@ -1,5 +1,25 @@
__all__ = [
- "GroupEventConditionHandler",
+ "EventCreatedByDetectorConditionHandler",
+ "EventSeenCountConditionHandler",
+ "EveryEventConditionHandler",
+ "ReappearedEventConditionHandler",
+ "RegressionEventConditionHandler",
+ "ExistingHighPriorityIssueConditionHandler",
+ "EventAttributeConditionHandler",
+ "FirstSeenEventConditionHandler",
+ "NewHighPriorityIssueConditionHandler",
]
-from .group_event import GroupEventConditionHandler
+from .group_event_handlers import (
+ EventAttributeConditionHandler,
+ EventCreatedByDetectorConditionHandler,
+ EventSeenCountConditionHandler,
+ EveryEventConditionHandler,
+)
+from .group_state_handlers import (
+ ExistingHighPriorityIssueConditionHandler,
+ FirstSeenEventConditionHandler,
+ NewHighPriorityIssueConditionHandler,
+ ReappearedEventConditionHandler,
+ RegressionEventConditionHandler,
+)
diff --git a/src/sentry/workflow_engine/handlers/condition/group_event.py b/src/sentry/workflow_engine/handlers/condition/group_event.py
deleted file mode 100644
index e392db084cfdd9..00000000000000
--- a/src/sentry/workflow_engine/handlers/condition/group_event.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from typing import Any
-
-from sentry.eventstore.models import GroupEvent
-from sentry.workflow_engine.models.data_condition import Condition
-from sentry.workflow_engine.registry import condition_handler_registry
-from sentry.workflow_engine.types import DataConditionHandler
-
-
-def get_nested_value(data: Any, path: str, default: Any = None) -> Any | None:
- try:
- value = data
- for part in path.split("."):
- if hasattr(value, part):
- value = getattr(value, part)
- elif hasattr(value, "get"):
- value = value.get(part)
- else:
- return default
- return value
- except Exception:
- return default
-
-
-@condition_handler_registry.register(Condition.GROUP_EVENT_ATTR_COMPARISON)
-class GroupEventConditionHandler(DataConditionHandler[GroupEvent]):
- @staticmethod
- def evaluate_value(data: GroupEvent, comparison: Any, data_filter: str) -> bool:
- event_value = get_nested_value(data, data_filter)
- return event_value == comparison
diff --git a/src/sentry/workflow_engine/handlers/condition/group_event_handlers.py b/src/sentry/workflow_engine/handlers/condition/group_event_handlers.py
new file mode 100644
index 00000000000000..f825f7eb900154
--- /dev/null
+++ b/src/sentry/workflow_engine/handlers/condition/group_event_handlers.py
@@ -0,0 +1,85 @@
+from typing import Any
+
+import sentry_sdk
+
+from sentry.eventstore.models import GroupEvent
+from sentry.rules import MatchType, match_values
+from sentry.rules.conditions.event_attribute import attribute_registry
+from sentry.utils.registry import NoRegistrationExistsError
+from sentry.workflow_engine.models.data_condition import Condition
+from sentry.workflow_engine.registry import condition_handler_registry
+from sentry.workflow_engine.types import DataConditionHandler, WorkflowJob
+
+
+@condition_handler_registry.register(Condition.EVENT_CREATED_BY_DETECTOR)
+class EventCreatedByDetectorConditionHandler(DataConditionHandler[WorkflowJob]):
+ @staticmethod
+ def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
+ event = job["event"]
+ if event.occurrence is None or event.occurrence.evidence_data is None:
+ return False
+
+ return event.occurrence.evidence_data.get("detector_id", None) == comparison
+
+
+@condition_handler_registry.register(Condition.EVERY_EVENT)
+class EveryEventConditionHandler(DataConditionHandler[WorkflowJob]):
+ @staticmethod
+ def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
+ return True
+
+
+@condition_handler_registry.register(Condition.EVENT_SEEN_COUNT)
+class EventSeenCountConditionHandler(DataConditionHandler[WorkflowJob]):
+ @staticmethod
+ def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
+ event = job["event"]
+ return event.group.times_seen == comparison
+
+
+@condition_handler_registry.register(Condition.EVENT_ATTRIBUTE)
+class EventAttributeConditionHandler(DataConditionHandler[WorkflowJob]):
+ @staticmethod
+ def get_attribute_values(event: GroupEvent, attribute: str) -> list[str]:
+ path = attribute.split(".")
+ first_attribute = path[0]
+ try:
+ attribute_handler = attribute_registry.get(first_attribute)
+ except NoRegistrationExistsError:
+ attribute_handler = None
+
+ if not attribute_handler:
+ attribute_values = []
+ else:
+ try:
+ attribute_values = attribute_handler.handle(path, event)
+ except KeyError as e:
+ attribute_values = []
+ sentry_sdk.capture_exception(e)
+
+ attribute_values = [str(value).lower() for value in attribute_values if value is not None]
+
+ return attribute_values
+
+ @staticmethod
+ def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
+ event = job["event"]
+ attribute = comparison.get("attribute", "")
+ attribute_values = EventAttributeConditionHandler.get_attribute_values(event, attribute)
+
+ match = comparison.get("match")
+ desired_value = comparison.get("value")
+ if not (match and desired_value) and not (match in (MatchType.IS_SET, MatchType.NOT_SET)):
+ return False
+
+ desired_value = str(desired_value).lower()
+
+ # NOTE: IS_SET condition differs between tagged_event and event_attribute so not handled by match_values
+ if match == MatchType.IS_SET:
+ return bool(attribute_values)
+ elif match == MatchType.NOT_SET:
+ return not attribute_values
+
+ return match_values(
+ group_values=attribute_values, match_value=desired_value, match_type=match
+ )
diff --git a/src/sentry/workflow_engine/handlers/condition/group_state_handlers.py b/src/sentry/workflow_engine/handlers/condition/group_state_handlers.py
new file mode 100644
index 00000000000000..481de774e49243
--- /dev/null
+++ b/src/sentry/workflow_engine/handlers/condition/group_state_handlers.py
@@ -0,0 +1,73 @@
+from typing import Any
+
+from sentry.types.group import PriorityLevel
+from sentry.workflow_engine.models.data_condition import Condition
+from sentry.workflow_engine.registry import condition_handler_registry
+from sentry.workflow_engine.types import DataConditionHandler, WorkflowJob
+
+
+def is_new_event(job: WorkflowJob) -> bool:
+ state = job.get("group_state")
+ if state is None:
+ return False
+
+ workflow = job.get("workflow")
+ if workflow is None or workflow.environment_id is None:
+ return state["is_new"]
+
+ return state["is_new_group_environment"]
+
+
+@condition_handler_registry.register(Condition.REGRESSION_EVENT)
+class RegressionEventConditionHandler(DataConditionHandler[WorkflowJob]):
+ @staticmethod
+ def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
+ state = job.get("group_state")
+ if state is None:
+ return False
+
+ return state["is_regression"] == comparison
+
+
+@condition_handler_registry.register(Condition.REAPPEARED_EVENT)
+class ReappearedEventConditionHandler(DataConditionHandler[WorkflowJob]):
+ @staticmethod
+ def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
+ has_reappeared = job.get("has_reappeared")
+ if has_reappeared is None:
+ return False
+
+ return has_reappeared == comparison
+
+
+@condition_handler_registry.register(Condition.EXISTING_HIGH_PRIORITY_ISSUE)
+class ExistingHighPriorityIssueConditionHandler(DataConditionHandler[WorkflowJob]):
+ @staticmethod
+ def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
+ state = job.get("group_state")
+ if state is None or state["is_new"]:
+ return False
+
+ has_reappeared = job.get("has_reappeared", False)
+ has_escalated = job.get("has_escalated", False)
+ is_escalating = has_reappeared or has_escalated
+ return is_escalating and job["event"].group.priority == PriorityLevel.HIGH
+
+
+@condition_handler_registry.register(Condition.FIRST_SEEN_EVENT)
+class FirstSeenEventConditionHandler(DataConditionHandler[WorkflowJob]):
+ @staticmethod
+ def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
+ return is_new_event(job)
+
+
+@condition_handler_registry.register(Condition.NEW_HIGH_PRIORITY_ISSUE)
+class NewHighPriorityIssueConditionHandler(DataConditionHandler[WorkflowJob]):
+ @staticmethod
+ def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
+ is_new = is_new_event(job)
+ event = job["event"]
+ if not event.project.flags.has_high_priority_alerts:
+ return is_new
+
+ return is_new and event.group.priority == PriorityLevel.HIGH
diff --git a/src/sentry/workflow_engine/handlers/detector/stateful.py b/src/sentry/workflow_engine/handlers/detector/stateful.py
index 8171787ad38d46..67781e8795ad41 100644
--- a/src/sentry/workflow_engine/handlers/detector/stateful.py
+++ b/src/sentry/workflow_engine/handlers/detector/stateful.py
@@ -54,7 +54,7 @@ def get_dedupe_value(self, data_packet: DataPacket[T]) -> int:
pass
@abc.abstractmethod
- def get_group_key_values(self, data_packet: DataPacket[T]) -> dict[str, int]:
+ def get_group_key_values(self, data_packet: DataPacket[T]) -> dict[DetectorGroupKey, int]:
"""
Extracts the values for all the group keys that exist in the given data packet,
and returns then as a dict keyed by group_key.
@@ -70,6 +70,9 @@ def build_occurrence_and_event_data(
def build_fingerprint(self, group_key) -> list[str]:
"""
Builds a fingerprint to uniquely identify a detected issue
+
+ TODO - Take into account the data source / query that triggered the detector,
+ we'll want to create a new issue if the query changes.
"""
return [f"{self.detector.id}{':' + group_key if group_key is not None else ''}"]
@@ -84,13 +87,17 @@ def get_state_data(
group_key_detectors = self.bulk_get_detector_state(group_keys)
dedupe_keys = [self.build_dedupe_value_key(gk) for gk in group_keys]
pipeline = get_redis_client().pipeline()
+
for dk in dedupe_keys:
pipeline.get(dk)
+
group_key_dedupe_values = {
gk: int(dv) if dv else 0 for gk, dv in zip(group_keys, pipeline.execute())
}
+
pipeline.reset()
counter_updates = {}
+
if self.counter_names:
counter_keys = [
self.build_counter_value_key(gk, name)
@@ -117,7 +124,7 @@ def get_state_data(
else DetectorPriorityLevel.OK
),
dedupe_value=group_key_dedupe_values[gk],
- counter_updates=counter_updates[gk],
+ counter_updates=counter_updates.get(gk, {}),
)
return results
diff --git a/src/sentry/workflow_engine/migration_helpers/alert_rule.py b/src/sentry/workflow_engine/migration_helpers/alert_rule.py
new file mode 100644
index 00000000000000..79f2fdb43452bd
--- /dev/null
+++ b/src/sentry/workflow_engine/migration_helpers/alert_rule.py
@@ -0,0 +1,157 @@
+from sentry.incidents.grouptype import MetricAlertFire
+from sentry.incidents.models.alert_rule import AlertRule
+from sentry.snuba.models import QuerySubscription, SnubaQuery
+from sentry.users.services.user import RpcUser
+from sentry.workflow_engine.models import (
+ AlertRuleDetector,
+ AlertRuleWorkflow,
+ DataConditionGroup,
+ DataSource,
+ Detector,
+ DetectorState,
+ DetectorWorkflow,
+ Workflow,
+ WorkflowDataConditionGroup,
+)
+from sentry.workflow_engine.types import DetectorPriorityLevel
+
+
+def create_metric_alert_lookup_tables(
+ alert_rule: AlertRule,
+ detector: Detector,
+ workflow: Workflow,
+ data_source: DataSource,
+ data_condition_group: DataConditionGroup,
+) -> tuple[AlertRuleDetector, AlertRuleWorkflow, DetectorWorkflow, WorkflowDataConditionGroup]:
+ alert_rule_detector = AlertRuleDetector.objects.create(alert_rule=alert_rule, detector=detector)
+ alert_rule_workflow = AlertRuleWorkflow.objects.create(alert_rule=alert_rule, workflow=workflow)
+ detector_workflow = DetectorWorkflow.objects.create(detector=detector, workflow=workflow)
+ workflow_data_condition_group = WorkflowDataConditionGroup.objects.create(
+ condition_group=data_condition_group, workflow=workflow
+ )
+ return (
+ alert_rule_detector,
+ alert_rule_workflow,
+ detector_workflow,
+ workflow_data_condition_group,
+ )
+
+
+def create_data_source(
+ organization_id: int, snuba_query: SnubaQuery | None = None
+) -> DataSource | None:
+ if not snuba_query:
+ return None
+
+ try:
+ query_subscription = QuerySubscription.objects.get(snuba_query=snuba_query.id)
+ except QuerySubscription.DoesNotExist:
+ return None
+
+ return DataSource.objects.create(
+ organization_id=organization_id,
+ query_id=query_subscription.id,
+ type="snuba_query_subscription",
+ )
+
+
+def create_data_condition_group(organization_id: int) -> DataConditionGroup:
+ return DataConditionGroup.objects.create(
+ logic_type=DataConditionGroup.Type.ANY,
+ organization_id=organization_id,
+ )
+
+
+def create_workflow(
+ name: str,
+ organization_id: int,
+ data_condition_group: DataConditionGroup,
+ user: RpcUser | None = None,
+) -> Workflow:
+ return Workflow.objects.create(
+ name=name,
+ organization_id=organization_id,
+ when_condition_group=data_condition_group,
+ enabled=True,
+ created_by_id=user.id if user else None,
+ config={},
+ )
+
+
+def create_detector(
+ alert_rule: AlertRule,
+ project_id: int,
+ data_condition_group: DataConditionGroup,
+ user: RpcUser | None = None,
+) -> Detector:
+ return Detector.objects.create(
+ project_id=project_id,
+ enabled=True,
+ created_by_id=user.id if user else None,
+ name=alert_rule.name,
+ workflow_condition_group=data_condition_group,
+ type=MetricAlertFire.slug,
+ description=alert_rule.description,
+ owner_user_id=alert_rule.user_id,
+ owner_team=alert_rule.team,
+ config={ # TODO create a schema
+ "threshold_period": alert_rule.threshold_period,
+ "sensitivity": alert_rule.sensitivity,
+ "seasonality": alert_rule.seasonality,
+ "comparison_delta": alert_rule.comparison_delta,
+ },
+ )
+
+
+def migrate_alert_rule(
+ alert_rule: AlertRule,
+ user: RpcUser | None = None,
+) -> (
+ tuple[
+ DataSource,
+ DataConditionGroup,
+ Workflow,
+ Detector,
+ DetectorState,
+ AlertRuleDetector,
+ AlertRuleWorkflow,
+ DetectorWorkflow,
+ WorkflowDataConditionGroup,
+ ]
+ | None
+):
+ organization_id = alert_rule.organization_id
+ project = alert_rule.projects.first()
+ if not project:
+ return None
+
+ data_source = create_data_source(organization_id, alert_rule.snuba_query)
+ if not data_source:
+ return None
+
+ data_condition_group = create_data_condition_group(organization_id)
+ workflow = create_workflow(alert_rule.name, organization_id, data_condition_group, user)
+ detector = create_detector(alert_rule, project.id, data_condition_group, user)
+
+ data_source.detectors.set([detector])
+ detector_state = DetectorState.objects.create(
+ detector=detector,
+ active=False,
+ state=DetectorPriorityLevel.OK,
+ )
+ alert_rule_detector, alert_rule_workflow, detector_workflow, workflow_data_condition_group = (
+ create_metric_alert_lookup_tables(
+ alert_rule, detector, workflow, data_source, data_condition_group
+ )
+ )
+ return (
+ data_source,
+ data_condition_group,
+ workflow,
+ detector,
+ detector_state,
+ alert_rule_detector,
+ alert_rule_workflow,
+ detector_workflow,
+ workflow_data_condition_group,
+ )
diff --git a/src/sentry/workflow_engine/migration_helpers/issue_alert_conditions.py b/src/sentry/workflow_engine/migration_helpers/issue_alert_conditions.py
new file mode 100644
index 00000000000000..bd6c7bfd1e1484
--- /dev/null
+++ b/src/sentry/workflow_engine/migration_helpers/issue_alert_conditions.py
@@ -0,0 +1,107 @@
+from collections.abc import Callable
+from typing import Any
+
+from sentry.rules.conditions.event_attribute import EventAttributeCondition
+from sentry.rules.conditions.every_event import EveryEventCondition
+from sentry.rules.conditions.existing_high_priority_issue import ExistingHighPriorityIssueCondition
+from sentry.rules.conditions.first_seen_event import FirstSeenEventCondition
+from sentry.rules.conditions.new_high_priority_issue import NewHighPriorityIssueCondition
+from sentry.rules.conditions.reappeared_event import ReappearedEventCondition
+from sentry.rules.conditions.regression_event import RegressionEventCondition
+from sentry.utils.registry import Registry
+from sentry.workflow_engine.models.data_condition import Condition, DataCondition
+from sentry.workflow_engine.models.data_condition_group import DataConditionGroup
+
+data_condition_translator_registry = Registry[
+ Callable[[dict[str, Any], DataConditionGroup], DataCondition]
+]()
+
+
+def translate_to_data_condition(data: dict[str, Any], dcg: DataConditionGroup):
+ translator = data_condition_translator_registry.get(data["id"])
+ return translator(data, dcg)
+
+
+@data_condition_translator_registry.register(ReappearedEventCondition.id)
+def create_reappeared_event_data_condition(
+ data: dict[str, Any], dcg: DataConditionGroup
+) -> DataCondition:
+ return DataCondition.objects.create(
+ type=Condition.REAPPEARED_EVENT,
+ comparison=True,
+ condition_result=True,
+ condition_group=dcg,
+ )
+
+
+@data_condition_translator_registry.register(RegressionEventCondition.id)
+def create_regressed_event_data_condition(
+ data: dict[str, Any], dcg: DataConditionGroup
+) -> DataCondition:
+ return DataCondition.objects.create(
+ type=Condition.REGRESSION_EVENT,
+ comparison=True,
+ condition_result=True,
+ condition_group=dcg,
+ )
+
+
+@data_condition_translator_registry.register(EveryEventCondition.id)
+def create_every_event_data_condition(
+ data: dict[str, Any], dcg: DataConditionGroup
+) -> DataCondition:
+ return DataCondition.objects.create(
+ type=Condition.EVERY_EVENT,
+ comparison=True,
+ condition_result=True,
+ condition_group=dcg,
+ )
+
+
+@data_condition_translator_registry.register(ExistingHighPriorityIssueCondition.id)
+def create_existing_high_priority_issue_data_condition(
+ data: dict[str, Any], dcg: DataConditionGroup
+) -> DataCondition:
+ return DataCondition.objects.create(
+ type=Condition.EXISTING_HIGH_PRIORITY_ISSUE,
+ comparison=True,
+ condition_result=True,
+ condition_group=dcg,
+ )
+
+
+@data_condition_translator_registry.register(EventAttributeCondition.id)
+def create_event_attribute_data_condition(
+ data: dict[str, Any], dcg: DataConditionGroup
+) -> DataCondition:
+ comparison = {"match": data["match"], "value": data["value"], "attribute": data["attribute"]}
+ return DataCondition.objects.create(
+ type=Condition.EVENT_ATTRIBUTE,
+ comparison=comparison,
+ condition_result=True,
+ condition_group=dcg,
+ )
+
+
+@data_condition_translator_registry.register(FirstSeenEventCondition.id)
+def create_first_seen_event_data_condition(
+ data: dict[str, Any], dcg: DataConditionGroup
+) -> DataCondition:
+ return DataCondition.objects.create(
+ type=Condition.FIRST_SEEN_EVENT,
+ comparison=True,
+ condition_result=True,
+ condition_group=dcg,
+ )
+
+
+@data_condition_translator_registry.register(NewHighPriorityIssueCondition.id)
+def create_new_high_priority_issue_condition(
+ data: dict[str, Any], dcg: DataConditionGroup
+) -> DataCondition:
+ return DataCondition.objects.create(
+ type=Condition.NEW_HIGH_PRIORITY_ISSUE,
+ comparison=True,
+ condition_result=True,
+ condition_group=dcg,
+ )
diff --git a/src/sentry/workflow_engine/migrations/0016_refactor_action_model.py b/src/sentry/workflow_engine/migrations/0016_refactor_action_model.py
new file mode 100644
index 00000000000000..15511e36ffd139
--- /dev/null
+++ b/src/sentry/workflow_engine/migrations/0016_refactor_action_model.py
@@ -0,0 +1,33 @@
+# Generated by Django 5.1.4 on 2024-12-17 03:31
+
+from django.db import migrations, models
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("workflow_engine", "0015_create_rule_lookup_tables"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="action",
+ name="legacy_notification_type",
+ field=models.TextField(null=True),
+ ),
+ ]
diff --git a/src/sentry/workflow_engine/migrations/0017_ref_data_condition.py b/src/sentry/workflow_engine/migrations/0017_ref_data_condition.py
new file mode 100644
index 00000000000000..45cc499ced28fb
--- /dev/null
+++ b/src/sentry/workflow_engine/migrations/0017_ref_data_condition.py
@@ -0,0 +1,38 @@
+# Generated by Django 5.1.4 on 2024-12-18 05:36
+
+from django.db import migrations, models
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("workflow_engine", "0016_refactor_action_model"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="datacondition",
+ name="condition",
+ field=models.CharField(max_length=200, null=True),
+ ),
+ migrations.AlterField(
+ model_name="datacondition",
+ name="type",
+ field=models.CharField(default="eq", max_length=200),
+ ),
+ ]
diff --git a/src/sentry/workflow_engine/migrations/0018_rm_data_condition_condition.py b/src/sentry/workflow_engine/migrations/0018_rm_data_condition_condition.py
new file mode 100644
index 00000000000000..bc82885d5bdaaf
--- /dev/null
+++ b/src/sentry/workflow_engine/migrations/0018_rm_data_condition_condition.py
@@ -0,0 +1,33 @@
+# Generated by Django 5.1.4 on 2024-12-18 22:21
+
+from sentry.new_migrations.migrations import CheckedMigration
+from sentry.new_migrations.monkey.fields import SafeRemoveField
+from sentry.new_migrations.monkey.state import DeletionAction
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("workflow_engine", "0017_ref_data_condition"),
+ ]
+
+ operations = [
+ SafeRemoveField(
+ model_name="datacondition",
+ name="condition",
+ deletion_action=DeletionAction.MOVE_TO_PENDING,
+ ),
+ ]
diff --git a/src/sentry/workflow_engine/migrations/0019_drop_dataconditions_condition.py b/src/sentry/workflow_engine/migrations/0019_drop_dataconditions_condition.py
new file mode 100644
index 00000000000000..9c96936d1ec154
--- /dev/null
+++ b/src/sentry/workflow_engine/migrations/0019_drop_dataconditions_condition.py
@@ -0,0 +1,33 @@
+# Generated by Django 5.1.4 on 2024-12-19 19:56
+
+from sentry.new_migrations.migrations import CheckedMigration
+from sentry.new_migrations.monkey.fields import SafeRemoveField
+from sentry.new_migrations.monkey.state import DeletionAction
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("workflow_engine", "0018_rm_data_condition_condition"),
+ ]
+
+ operations = [
+ SafeRemoveField(
+ model_name="datacondition",
+ name="condition",
+ deletion_action=DeletionAction.DELETE,
+ ),
+ ]
diff --git a/src/sentry/workflow_engine/models/action.py b/src/sentry/workflow_engine/models/action.py
index 027ef5cf11c933..efc7a1e3dabd9f 100644
--- a/src/sentry/workflow_engine/models/action.py
+++ b/src/sentry/workflow_engine/models/action.py
@@ -7,10 +7,9 @@
from sentry.backup.scopes import RelocationScope
from sentry.db.models import DefaultFieldsModel, region_silo_model, sane_repr
from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey
-from sentry.eventstore.models import GroupEvent
from sentry.notifications.models.notificationaction import ActionTarget
from sentry.workflow_engine.registry import action_handler_registry
-from sentry.workflow_engine.types import ActionHandler
+from sentry.workflow_engine.types import ActionHandler, WorkflowJob
if TYPE_CHECKING:
from sentry.workflow_engine.models import Detector
@@ -30,9 +29,15 @@ class Action(DefaultFieldsModel):
__repr__ = sane_repr("id", "type")
class Type(models.TextChoices):
- NOTIFICATION = "notification"
+ EMAIL = "email"
+ SLACK = "slack"
+ PAGERDUTY = "pagerduty"
WEBHOOK = "webhook"
+ class LegacyNotificationType(models.TextChoices):
+ ISSUE_ALERT = "issue"
+ METRIC_ALERT = "metric"
+
# The type field is used to denote the type of action we want to trigger
type = models.TextField(choices=Type.choices)
data = models.JSONField(default=dict)
@@ -46,6 +51,13 @@ class Type(models.TextChoices):
"sentry.Integration", blank=True, null=True, on_delete="CASCADE"
)
+ # LEGACY: The legacy_notification_type is used to denote if this notification was for an issue alert, metric alert, etc.
+ # We need this because of how tightly coupled the notification system is with the legacy alert models
+ legacy_notification_type = models.TextField(
+ null=True,
+ choices=LegacyNotificationType.choices,
+ )
+
# LEGACY: The target_display is used to display the target's name in notifications
target_display = models.TextField(null=True)
@@ -59,7 +71,7 @@ def get_handler(self) -> ActionHandler:
action_type = Action.Type(self.type)
return action_handler_registry.get(action_type)
- def trigger(self, evt: GroupEvent, detector: Detector) -> None:
+ def trigger(self, job: WorkflowJob, detector: Detector) -> None:
# get the handler for the action type
handler = self.get_handler()
- handler.execute(evt, self, detector)
+ handler.execute(job, self, detector)
diff --git a/src/sentry/workflow_engine/models/data_condition.py b/src/sentry/workflow_engine/models/data_condition.py
index 9ed3efc66c5c55..22aecf98180a67 100644
--- a/src/sentry/workflow_engine/models/data_condition.py
+++ b/src/sentry/workflow_engine/models/data_condition.py
@@ -1,7 +1,5 @@
import logging
import operator
-from collections.abc import Callable
-from enum import StrEnum
from typing import Any, TypeVar, cast
from django.db import models
@@ -10,23 +8,27 @@
from sentry.db.models import DefaultFieldsModel, region_silo_model, sane_repr
from sentry.utils.registry import NoRegistrationExistsError
from sentry.workflow_engine.registry import condition_handler_registry
-from sentry.workflow_engine.types import (
- DataConditionHandler,
- DataConditionResult,
- DetectorPriorityLevel,
-)
+from sentry.workflow_engine.types import DataConditionResult, DetectorPriorityLevel
logger = logging.getLogger(__name__)
-class Condition(StrEnum):
+class Condition(models.TextChoices):
EQUAL = "eq"
GREATER_OR_EQUAL = "gte"
GREATER = "gt"
LESS_OR_EQUAL = "lte"
LESS = "lt"
NOT_EQUAL = "ne"
- GROUP_EVENT_ATTR_COMPARISON = "group_event_attr_comparison"
+ EVENT_ATTRIBUTE = "event_attribute"
+ EVENT_CREATED_BY_DETECTOR = "event_created_by_detector"
+ EVENT_SEEN_COUNT = "event_seen_count"
+ EVERY_EVENT = "every_event"
+ EXISTING_HIGH_PRIORITY_ISSUE = "existing_high_priority_issue"
+ FIRST_SEEN_EVENT = "first_seen_event"
+ NEW_HIGH_PRIORITY_ISSUE = "new_high_priority_issue"
+ REGRESSION_EVENT = "regression_event"
+ REAPPEARED_EVENT = "reappeared_event"
condition_ops = {
@@ -50,9 +52,6 @@ class DataCondition(DefaultFieldsModel):
__relocation_scope__ = RelocationScope.Organization
__repr__ = sane_repr("type", "condition", "condition_group")
- # The condition is the logic condition that needs to be met, gt, lt, eq, etc.
- condition = models.CharField(max_length=200)
-
# The comparison is the value that the condition is compared to for the evaluation, this must be a primitive value
comparison = models.JSONField()
@@ -60,7 +59,7 @@ class DataCondition(DefaultFieldsModel):
condition_result = models.JSONField()
# The type of condition, this is used to initialize the condition classes
- type = models.CharField(max_length=200)
+ type = models.CharField(max_length=200, choices=Condition.choices, default=Condition.EQUAL)
condition_group = models.ForeignKey(
"workflow_engine.DataConditionGroup",
@@ -85,44 +84,31 @@ def get_condition_result(self) -> DataConditionResult:
return None
- def get_condition_handler(self) -> DataConditionHandler[T] | None:
+ def evaluate_value(self, value: T) -> DataConditionResult:
try:
condition_type = Condition(self.type)
except ValueError:
- # If the type isn't in the condition, then it won't be in the registry either.
- raise NoRegistrationExistsError(f"No registration exists for {self.type}")
-
- return condition_handler_registry.get(condition_type)
+ logger.exception(
+ "Invalid condition type",
+ extra={"type": self.type, "id": self.id},
+ )
+ return None
- def evaluate_value(self, value: T) -> DataConditionResult:
- condition_handler: DataConditionHandler[T] | None = None
- op: Callable | None = None
+ if condition_type in condition_ops:
+ # If the condition is a base type, handle it directly
+ op = condition_ops[Condition(self.type)]
+ result = op(cast(Any, value), self.comparison)
+ return self.get_condition_result() if result else None
+ # Otherwise, we need to get the handler and evaluate the value
try:
- # Use a custom hanler
- condition_handler = self.get_condition_handler()
+ handler = condition_handler_registry.get(condition_type)
except NoRegistrationExistsError:
- # If it's not a custom handler, use the default operators
- condition = Condition(self.condition)
- op = condition_ops.get(condition, None)
-
- if condition_handler is not None:
- result = condition_handler.evaluate_value(value, self.comparison, self.condition)
- elif op is not None:
- result = op(cast(Any, value), self.comparison)
- else:
- logger.error(
- "Invalid Data Condition Evaluation",
- extra={
- "id": self.id,
- "type": self.type,
- "condition": self.condition,
- },
+ logger.exception(
+ "No registration exists for condition",
+ extra={"type": self.type, "id": self.id},
)
-
return None
- if result:
- return self.get_condition_result()
-
- return None
+ result = handler.evaluate_value(value, self.comparison)
+ return self.get_condition_result() if result else None
diff --git a/src/sentry/workflow_engine/models/data_source.py b/src/sentry/workflow_engine/models/data_source.py
index ebfbc63fd18f14..0ebf62f600ddd4 100644
--- a/src/sentry/workflow_engine/models/data_source.py
+++ b/src/sentry/workflow_engine/models/data_source.py
@@ -3,6 +3,8 @@
from typing import Generic, TypeVar
from django.db import models
+from django.db.models.signals import pre_save
+from django.dispatch import receiver
from sentry.backup.scopes import RelocationScope
from sentry.db.models import (
@@ -11,6 +13,7 @@
FlexibleForeignKey,
region_silo_model,
)
+from sentry.utils.registry import NoRegistrationExistsError
from sentry.workflow_engine.models.data_source_detector import DataSourceDetector
from sentry.workflow_engine.registry import data_source_type_registry
from sentry.workflow_engine.types import DataSourceTypeHandler
@@ -29,7 +32,11 @@ class DataSource(DefaultFieldsModel):
__relocation_scope__ = RelocationScope.Organization
organization = FlexibleForeignKey("sentry.Organization")
+
+ # Should this be a string so we can support UUID / ints?
query_id = BoundedBigIntegerField()
+
+ # This is a dynamic field, depending on the type in the data_source_type_registry
type = models.TextField()
detectors = models.ManyToManyField("workflow_engine.Detector", through=DataSourceDetector)
@@ -45,3 +52,19 @@ def type_handler(self) -> builtins.type[DataSourceTypeHandler]:
if not handler:
raise ValueError(f"Unknown data source type: {self.type}")
return handler
+
+
+@receiver(pre_save, sender=DataSource)
+def ensure_type_handler_registered(sender, instance: DataSource, **kwargs):
+ """
+ Ensure that the type of the data source is valid and registered in the data_source_type_registry
+ """
+ data_source_type = instance.type
+
+ if not data_source_type:
+        raise ValueError(f"No data source type found with type {instance.type}")
+
+ try:
+ data_source_type_registry.get(data_source_type)
+ except NoRegistrationExistsError:
+ raise ValueError(f"No data source type found with type {data_source_type}")
diff --git a/src/sentry/workflow_engine/models/detector.py b/src/sentry/workflow_engine/models/detector.py
index c11a3851b608ce..0cc2145a06a1c5 100644
--- a/src/sentry/workflow_engine/models/detector.py
+++ b/src/sentry/workflow_engine/models/detector.py
@@ -7,6 +7,8 @@
from django.conf import settings
from django.db import models
from django.db.models import UniqueConstraint
+from django.db.models.signals import pre_save
+from django.dispatch import receiver
from sentry.backup.scopes import RelocationScope
from sentry.db.models import DefaultFieldsModel, FlexibleForeignKey, region_silo_model
@@ -53,17 +55,12 @@ class Detector(DefaultFieldsModel, OwnerModel, JSONConfigBase):
on_delete=models.SET_NULL,
)
- # The type of detector that is being used, this is used to determine the class
- # to load for the detector
+ # maps to registry (sentry.issues.grouptype.registry) entries for GroupType.slug in sentry.issues.grouptype.GroupType
type = models.CharField(max_length=200)
# The user that created the detector
created_by_id = HybridCloudForeignKey(settings.AUTH_USER_MODEL, null=True, on_delete="SET_NULL")
- @property
- def CONFIG_SCHEMA(self) -> dict[str, Any]:
- raise NotImplementedError('Subclasses must define a "CONFIG_SCHEMA" attribute')
-
class Meta(OwnerModel.Meta):
constraints = OwnerModel.Meta.constraints + [
UniqueConstraint(
@@ -83,7 +80,6 @@ def detector_handler(self) -> DetectorHandler | None:
logger.error(
"No registered grouptype for detector",
extra={
- "group_type": str(group_type),
"detector_id": self.id,
"detector_type": self.type,
},
@@ -105,3 +101,18 @@ def detector_handler(self) -> DetectorHandler | None:
def get_audit_log_data(self) -> dict[str, Any]:
# TODO: Create proper audit log data for the detector, group and conditions
return {}
+
+
+@receiver(pre_save, sender=Detector)
+def enforce_config_schema(sender, instance: Detector, **kwargs):
+ """
+    Ensures the detector type is registered in the grouptype registry and validates the config against its detector config schema.
+ This needs to be a signal because the grouptype registry's entries are not available at import time.
+ """
+ group_type = instance.group_type
+
+ if not group_type:
+ raise ValueError(f"No group type found with type {instance.type}")
+
+ config_schema = group_type.detector_config_schema
+ instance.validate_config(config_schema)
diff --git a/src/sentry/workflow_engine/models/json_config.py b/src/sentry/workflow_engine/models/json_config.py
index 1b353ccf18cbcb..b62b936d9c4007 100644
--- a/src/sentry/workflow_engine/models/json_config.py
+++ b/src/sentry/workflow_engine/models/json_config.py
@@ -1,4 +1,3 @@
-from abc import abstractproperty
from typing import Any
from django.db import models
@@ -8,13 +7,9 @@
class JSONConfigBase(models.Model):
config = models.JSONField(db_default={})
- @abstractproperty
- def CONFIG_SCHEMA(self) -> dict[str, Any]:
- pass
-
- def validate_config(self) -> None:
+ def validate_config(self, schema: dict[str, Any]) -> None:
try:
- validate(self.config, self.CONFIG_SCHEMA)
+ validate(self.config, schema)
except ValidationError as e:
raise ValidationError(f"Invalid config: {e.message}")
diff --git a/src/sentry/workflow_engine/models/workflow.py b/src/sentry/workflow_engine/models/workflow.py
index 13483e258f1532..b37dece9419179 100644
--- a/src/sentry/workflow_engine/models/workflow.py
+++ b/src/sentry/workflow_engine/models/workflow.py
@@ -2,13 +2,15 @@
from django.conf import settings
from django.db import models
+from django.db.models.signals import pre_save
+from django.dispatch import receiver
from sentry.backup.scopes import RelocationScope
from sentry.db.models import DefaultFieldsModel, FlexibleForeignKey, region_silo_model, sane_repr
from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey
-from sentry.eventstore.models import GroupEvent
from sentry.models.owner_base import OwnerModel
from sentry.workflow_engine.processors.data_condition_group import evaluate_condition_group
+from sentry.workflow_engine.types import WorkflowJob
from .json_config import JSONConfigBase
@@ -35,8 +37,20 @@ class Workflow(DefaultFieldsModel, OwnerModel, JSONConfigBase):
created_by_id = HybridCloudForeignKey(settings.AUTH_USER_MODEL, null=True, on_delete="SET_NULL")
@property
- def CONFIG_SCHEMA(self) -> dict[str, Any]:
- raise NotImplementedError('Subclasses must define a "CONFIG_SCHEMA" attribute')
+ def config_schema(self) -> dict[str, Any]:
+ return {
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "title": "Workflow Schema",
+ "type": "object",
+ "properties": {
+ "frequency": {
+ "description": "How often the workflow should fire for a Group (minutes)",
+ "type": "integer",
+ "minimum": 0,
+ },
+ },
+ "additionalProperties": False,
+ }
__repr__ = sane_repr("name", "organization_id")
@@ -50,7 +64,7 @@ class Meta:
)
]
- def evaluate_trigger_conditions(self, evt: GroupEvent) -> bool:
+ def evaluate_trigger_conditions(self, job: WorkflowJob) -> bool:
"""
Evaluate the conditions for the workflow trigger and return if the evaluation was successful.
If there aren't any workflow trigger conditions, the workflow is considered triggered.
@@ -58,5 +72,11 @@ def evaluate_trigger_conditions(self, evt: GroupEvent) -> bool:
if self.when_condition_group is None:
return True
- evaluation, _ = evaluate_condition_group(self.when_condition_group, evt)
+ job["workflow"] = self
+ evaluation, _ = evaluate_condition_group(self.when_condition_group, job)
return evaluation
+
+
+@receiver(pre_save, sender=Workflow)
+def enforce_config_schema(sender, instance: Workflow, **kwargs):
+ instance.validate_config(instance.config_schema)
diff --git a/src/sentry/workflow_engine/processors/__init__.py b/src/sentry/workflow_engine/processors/__init__.py
index 700cd48361de44..0dca1394898aeb 100644
--- a/src/sentry/workflow_engine/processors/__init__.py
+++ b/src/sentry/workflow_engine/processors/__init__.py
@@ -1,6 +1,8 @@
__all__ = [
"process_data_sources",
"process_detectors",
+ "process_workflows",
+ "process_data_packet",
]
from .data_source import process_data_sources
diff --git a/src/sentry/workflow_engine/processors/action.py b/src/sentry/workflow_engine/processors/action.py
index 0e57ee44441aea..bf03015bf37444 100644
--- a/src/sentry/workflow_engine/processors/action.py
+++ b/src/sentry/workflow_engine/processors/action.py
@@ -1,11 +1,11 @@
from sentry.db.models.manager.base_query_set import BaseQuerySet
-from sentry.eventstore.models import GroupEvent
from sentry.workflow_engine.models import Action, DataConditionGroup, Workflow
from sentry.workflow_engine.processors.data_condition_group import evaluate_condition_group
+from sentry.workflow_engine.types import WorkflowJob
def evaluate_workflow_action_filters(
- workflows: set[Workflow], evt: GroupEvent
+ workflows: set[Workflow], job: WorkflowJob
) -> BaseQuerySet[Action]:
filtered_action_groups: set[DataConditionGroup] = set()
@@ -17,7 +17,7 @@ def evaluate_workflow_action_filters(
).distinct()
for action_condition in action_conditions:
- evaluation, result = evaluate_condition_group(action_condition, evt)
+ evaluation, result = evaluate_condition_group(action_condition, job)
if evaluation:
filtered_action_groups.add(action_condition)
diff --git a/src/sentry/workflow_engine/processors/data_packet.py b/src/sentry/workflow_engine/processors/data_packet.py
new file mode 100644
index 00000000000000..35997e02f627e3
--- /dev/null
+++ b/src/sentry/workflow_engine/processors/data_packet.py
@@ -0,0 +1,24 @@
+from sentry.workflow_engine.handlers.detector import DetectorEvaluationResult
+from sentry.workflow_engine.models import DataPacket, Detector
+from sentry.workflow_engine.processors.data_source import process_data_sources
+from sentry.workflow_engine.processors.detector import process_detectors
+from sentry.workflow_engine.types import DetectorGroupKey
+
+
+def process_data_packets(
+ data_packets: list[DataPacket], query_type: str
+) -> list[tuple[Detector, dict[DetectorGroupKey, DetectorEvaluationResult]]]:
+ """
+ This method ties the two main pre-processing methods together to process
+ the incoming data and create issue occurrences.
+ """
+ processed_sources = process_data_sources(data_packets, query_type)
+
+ results: list[tuple[Detector, dict[DetectorGroupKey, DetectorEvaluationResult]]] = []
+ for data_packet, detectors in processed_sources:
+ detector_results = process_detectors(data_packet, detectors)
+
+ for detector, detector_state in detector_results:
+ results.append((detector, detector_state))
+
+ return results
diff --git a/src/sentry/workflow_engine/processors/data_source.py b/src/sentry/workflow_engine/processors/data_source.py
index f7f375ccf59892..5df9711f4b7775 100644
--- a/src/sentry/workflow_engine/processors/data_source.py
+++ b/src/sentry/workflow_engine/processors/data_source.py
@@ -14,7 +14,8 @@ def process_data_sources(
) -> list[tuple[DataPacket, list[Detector]]]:
metrics.incr("sentry.workflow_engine.process_data_sources", tags={"query_type": query_type})
- data_packet_ids = {packet.query_id for packet in data_packets}
+ # TODO - change data_source.query_id to be a string to support UUIDs
+ data_packet_ids = {int(packet.query_id) for packet in data_packets}
# Fetch all data sources and associated detectors for the given data packets
with sentry_sdk.start_span(op="sentry.workflow_engine.process_data_sources.fetch_data_sources"):
@@ -23,12 +24,12 @@ def process_data_sources(
).prefetch_related(Prefetch("detectors"))
# Build a lookup dict for query_id to detectors
- query_id_to_detectors = {ds.query_id: list(ds.detectors.all()) for ds in data_sources}
+ query_id_to_detectors = {int(ds.query_id): list(ds.detectors.all()) for ds in data_sources}
# Create the result tuples
result = []
for packet in data_packets:
- detectors = query_id_to_detectors.get(packet.query_id)
+ detectors = query_id_to_detectors.get(int(packet.query_id))
if detectors:
data_packet_tuple = (packet, detectors)
diff --git a/src/sentry/workflow_engine/processors/detector.py b/src/sentry/workflow_engine/processors/detector.py
index 60bb85e0988190..b0a038fdbcb5c6 100644
--- a/src/sentry/workflow_engine/processors/detector.py
+++ b/src/sentry/workflow_engine/processors/detector.py
@@ -2,22 +2,23 @@
import logging
-from sentry.eventstore.models import GroupEvent
+from sentry.issues.grouptype import ErrorGroupType
from sentry.issues.issue_occurrence import IssueOccurrence
from sentry.issues.producer import PayloadType, produce_occurrence_to_kafka
from sentry.workflow_engine.handlers.detector import DetectorEvaluationResult
from sentry.workflow_engine.models import DataPacket, Detector
-from sentry.workflow_engine.types import DetectorGroupKey, DetectorType
+from sentry.workflow_engine.types import DetectorGroupKey, WorkflowJob
logger = logging.getLogger(__name__)
# TODO - cache these by evt.group_id? :thinking:
-def get_detector_by_event(evt: GroupEvent) -> Detector:
+def get_detector_by_event(job: WorkflowJob) -> Detector:
+ evt = job["event"]
issue_occurrence = evt.occurrence
if issue_occurrence is None:
- detector = Detector.objects.get(project_id=evt.project_id, type=DetectorType.ERROR)
+ detector = Detector.objects.get(project_id=evt.project_id, type=ErrorGroupType.slug)
else:
detector = Detector.objects.get(id=issue_occurrence.evidence_data.get("detector_id", None))
diff --git a/src/sentry/workflow_engine/processors/workflow.py b/src/sentry/workflow_engine/processors/workflow.py
index effc18173780a6..be2fe0f88e4e92 100644
--- a/src/sentry/workflow_engine/processors/workflow.py
+++ b/src/sentry/workflow_engine/processors/workflow.py
@@ -2,26 +2,26 @@
import sentry_sdk
-from sentry.eventstore.models import GroupEvent
from sentry.utils import metrics
from sentry.workflow_engine.models import Detector, Workflow
from sentry.workflow_engine.processors.action import evaluate_workflow_action_filters
from sentry.workflow_engine.processors.detector import get_detector_by_event
+from sentry.workflow_engine.types import WorkflowJob
logger = logging.getLogger(__name__)
-def evaluate_workflow_triggers(workflows: set[Workflow], evt: GroupEvent) -> set[Workflow]:
+def evaluate_workflow_triggers(workflows: set[Workflow], job: WorkflowJob) -> set[Workflow]:
triggered_workflows: set[Workflow] = set()
for workflow in workflows:
- if workflow.evaluate_trigger_conditions(evt):
+ if workflow.evaluate_trigger_conditions(job):
triggered_workflows.add(workflow)
return triggered_workflows
-def process_workflows(evt: GroupEvent) -> set[Workflow]:
+def process_workflows(job: WorkflowJob) -> set[Workflow]:
"""
This method will get the detector based on the event, and then gather the associated workflows.
Next, it will evaluate the "when" (or trigger) conditions for each workflow, if the conditions are met,
@@ -31,19 +31,19 @@ def process_workflows(evt: GroupEvent) -> set[Workflow]:
"""
# Check to see if the GroupEvent has an issue occurrence
try:
- detector = get_detector_by_event(evt)
+ detector = get_detector_by_event(job)
except Detector.DoesNotExist:
metrics.incr("workflow_engine.process_workflows.error")
- logger.exception("Detector not found for event", extra={"event_id": evt.event_id})
+ logger.exception("Detector not found for event", extra={"event_id": job["event"].event_id})
return set()
# Get the workflows, evaluate the when_condition_group, finally evaluate the actions for workflows that are triggered
workflows = set(Workflow.objects.filter(detectorworkflow__detector_id=detector.id).distinct())
- triggered_workflows = evaluate_workflow_triggers(workflows, evt)
- actions = evaluate_workflow_action_filters(triggered_workflows, evt)
+ triggered_workflows = evaluate_workflow_triggers(workflows, job)
+ actions = evaluate_workflow_action_filters(triggered_workflows, job)
with sentry_sdk.start_span(op="workflow_engine.process_workflows.trigger_actions"):
for action in actions:
- action.trigger(evt, detector)
+ action.trigger(job, detector)
return triggered_workflows
diff --git a/src/sentry/workflow_engine/registry.py b/src/sentry/workflow_engine/registry.py
index 2af6b070a1bcf8..ece9577feadd00 100644
--- a/src/sentry/workflow_engine/registry.py
+++ b/src/sentry/workflow_engine/registry.py
@@ -1,8 +1,6 @@
-from typing import Any
-
from sentry.utils.registry import Registry
from sentry.workflow_engine.types import ActionHandler, DataConditionHandler, DataSourceTypeHandler
data_source_type_registry = Registry[type[DataSourceTypeHandler]]()
-condition_handler_registry = Registry[DataConditionHandler[Any]]()
+condition_handler_registry = Registry[DataConditionHandler]()
action_handler_registry = Registry[ActionHandler]()
diff --git a/src/sentry/workflow_engine/types.py b/src/sentry/workflow_engine/types.py
index 50dc15a7010f22..b40f54c2a97448 100644
--- a/src/sentry/workflow_engine/types.py
+++ b/src/sentry/workflow_engine/types.py
@@ -1,13 +1,14 @@
from __future__ import annotations
-from enum import IntEnum, StrEnum
-from typing import TYPE_CHECKING, Any, Generic, TypeVar
+from enum import IntEnum
+from typing import TYPE_CHECKING, Any, Generic, TypedDict, TypeVar
from sentry.types.group import PriorityLevel
if TYPE_CHECKING:
from sentry.eventstore.models import GroupEvent
- from sentry.workflow_engine.models import Action, Detector
+ from sentry.eventstream.base import GroupState
+ from sentry.workflow_engine.models import Action, Detector, Workflow
T = TypeVar("T")
@@ -28,9 +29,22 @@ class DetectorPriorityLevel(IntEnum):
ProcessedDataConditionResult = tuple[bool, list[DataConditionResult]]
+class EventJob(TypedDict):
+ event: GroupEvent
+
+
+class WorkflowJob(EventJob, total=False):
+ group_state: GroupState
+ is_reprocessed: bool
+ has_reappeared: bool
+ has_alert: bool
+ has_escalated: bool
+ workflow: Workflow
+
+
class ActionHandler:
@staticmethod
- def execute(group_event: GroupEvent, action: Action, detector: Detector) -> None:
+ def execute(job: WorkflowJob, action: Action, detector: Detector) -> None:
raise NotImplementedError
@@ -42,9 +56,5 @@ def bulk_get_query_object(data_sources) -> dict[int, T | None]:
class DataConditionHandler(Generic[T]):
@staticmethod
- def evaluate_value(value: T, comparison: Any, condition: str) -> DataConditionResult:
+ def evaluate_value(value: T, comparison: Any) -> DataConditionResult:
raise NotImplementedError
-
-
-class DetectorType(StrEnum):
- ERROR = "ErrorDetector"
diff --git a/src/sentry_plugins/heroku/plugin.py b/src/sentry_plugins/heroku/plugin.py
index 12d587dd348ee4..6c535bb6a9dfff 100644
--- a/src/sentry_plugins/heroku/plugin.py
+++ b/src/sentry_plugins/heroku/plugin.py
@@ -11,7 +11,6 @@
from sentry.models.apikey import ApiKey
from sentry.models.options.project_option import ProjectOption
from sentry.models.repository import Repository
-from sentry.plugins.base.configuration import react_plugin_config
from sentry.plugins.bases.releasetracking import ReleaseTrackingPlugin
from sentry.plugins.interfaces.releasehook import ReleaseHook
from sentry.users.services.user.service import user_service
@@ -152,9 +151,6 @@ class HerokuPlugin(CorePluginMixin, ReleaseTrackingPlugin):
)
]
- def configure(self, project, request):
- return react_plugin_config(self, project, request)
-
def can_enable_for_projects(self):
return True
diff --git a/src/sentry_plugins/sessionstack/plugin.py b/src/sentry_plugins/sessionstack/plugin.py
index d38b36f6e6f36b..54603def4efc8b 100644
--- a/src/sentry_plugins/sessionstack/plugin.py
+++ b/src/sentry_plugins/sessionstack/plugin.py
@@ -7,7 +7,6 @@
from sentry.integrations.base import FeatureDescription, IntegrationFeatures
from sentry.interfaces.contexts import ContextType
from sentry.models.project import Project
-from sentry.plugins.base.configuration import react_plugin_config
from sentry.plugins.base.v2 import EventPreprocessor, Plugin2
from sentry.utils.settings import is_self_hosted
from sentry_plugins.base import CorePluginMixin
@@ -50,9 +49,6 @@ class SessionStackPlugin(CorePluginMixin, Plugin2):
def get_resource_links(self):
return self.resource_links + self.sessionstack_resource_links
- def configure(self, project, request):
- return react_plugin_config(self, project, request)
-
def has_project_conf(self):
return True
diff --git a/static/app/__mocks__/react-lazyload.tsx b/static/app/__mocks__/react-lazyload.tsx
index f4d66de2bb7605..a5be44b10d53e9 100644
--- a/static/app/__mocks__/react-lazyload.tsx
+++ b/static/app/__mocks__/react-lazyload.tsx
@@ -4,7 +4,7 @@
* These mocks are simple no-ops to make testing lazy-loaded components simpler.
*/
-const LazyLoad = ({children}) => children;
+const LazyLoad = ({children}: {children: React.ReactNode}) => children;
export const forceCheck = jest.fn();
diff --git a/static/app/actionCreators/dashboards.tsx b/static/app/actionCreators/dashboards.tsx
index 845fe1c8227db6..3c7a6c902bde9b 100644
--- a/static/app/actionCreators/dashboards.tsx
+++ b/static/app/actionCreators/dashboards.tsx
@@ -1,6 +1,6 @@
import omit from 'lodash/omit';
-import {addErrorMessage} from 'sentry/actionCreators/indicator';
+import {addErrorMessage, addSuccessMessage} from 'sentry/actionCreators/indicator';
import type {Client} from 'sentry/api';
import {ALL_ACCESS_PROJECTS} from 'sentry/constants/pageFilters';
import {t} from 'sentry/locale';
@@ -27,7 +27,7 @@ export function fetchDashboards(api: Client, orgSlug: string) {
if (errorResponse) {
const errors = flattenErrors(errorResponse, {});
- addErrorMessage(errors[Object.keys(errors)[0]] as string);
+ addErrorMessage(errors[Object.keys(errors)[0]!] as string);
} else {
addErrorMessage(t('Unable to fetch dashboards'));
}
@@ -73,7 +73,7 @@ export function createDashboard(
if (errorResponse) {
const errors = flattenErrors(errorResponse, {});
- addErrorMessage(errors[Object.keys(errors)[0]] as string);
+ addErrorMessage(errors[Object.keys(errors)[0]!] as string);
} else {
addErrorMessage(t('Unable to create dashboard'));
}
@@ -113,11 +113,12 @@ export async function updateDashboardFavorite(
},
}
);
+ addSuccessMessage(isFavorited ? t('Added as favorite') : t('Removed as favorite'));
} catch (response) {
const errorResponse = response?.responseJSON ?? null;
if (errorResponse) {
const errors = flattenErrors(errorResponse, {});
- addErrorMessage(errors[Object.keys(errors)[0]] as string);
+ addErrorMessage(errors[Object.keys(errors)[0]!]! as string);
} else if (isFavorited) {
addErrorMessage(t('Unable to favorite dashboard'));
} else {
@@ -144,7 +145,7 @@ export function fetchDashboard(
if (errorResponse) {
const errors = flattenErrors(errorResponse, {});
- addErrorMessage(errors[Object.keys(errors)[0]] as string);
+ addErrorMessage(errors[Object.keys(errors)[0]!] as string);
} else {
addErrorMessage(t('Unable to load dashboard'));
}
@@ -191,7 +192,7 @@ export function updateDashboard(
if (errorResponse) {
const errors = flattenErrors(errorResponse, {});
- addErrorMessage(errors[Object.keys(errors)[0]] as string);
+ addErrorMessage(errors[Object.keys(errors)[0]!] as string);
} else {
addErrorMessage(t('Unable to update dashboard'));
}
@@ -217,7 +218,7 @@ export function deleteDashboard(
if (errorResponse) {
const errors = flattenErrors(errorResponse, {});
- addErrorMessage(errors[Object.keys(errors)[0]] as string);
+ addErrorMessage(errors[Object.keys(errors)[0]!] as string);
} else {
addErrorMessage(t('Unable to delete dashboard'));
}
@@ -269,7 +270,7 @@ export function updateDashboardPermissions(
if (errorResponse) {
const errors = flattenErrors(errorResponse, {});
- addErrorMessage(errors[Object.keys(errors)[0]] as string);
+ addErrorMessage(errors[Object.keys(errors)[0]!]! as string);
} else {
addErrorMessage(t('Unable to update dashboard permissions'));
}
diff --git a/static/app/actionCreators/integrations.tsx b/static/app/actionCreators/integrations.tsx
index 3cc29123dcd076..ca6da8c9c2b959 100644
--- a/static/app/actionCreators/integrations.tsx
+++ b/static/app/actionCreators/integrations.tsx
@@ -4,84 +4,10 @@ import {
addSuccessMessage,
clearIndicators,
} from 'sentry/actionCreators/indicator';
-import {Client} from 'sentry/api';
+import type {Client} from 'sentry/api';
import {t, tct} from 'sentry/locale';
import type {Integration, Repository} from 'sentry/types/integrations';
-const api = new Client();
-
-/**
- * Removes an integration from a project.
- *
- * @param orgSlug Organization Slug
- * @param projectId Project Slug
- * @param integration The organization integration to remove
- */
-export function removeIntegrationFromProject(
- orgSlug: string,
- projectId: string,
- integration: Integration
-) {
- const endpoint = `/projects/${orgSlug}/${projectId}/integrations/${integration.id}/`;
- addLoadingMessage();
-
- return api.requestPromise(endpoint, {method: 'DELETE'}).then(
- () => {
- addSuccessMessage(t('Disabled %s for %s', integration.name, projectId));
- },
- () => {
- addErrorMessage(t('Failed to disable %s for %s', integration.name, projectId));
- }
- );
-}
-
-/**
- * Add an integration to a project
- *
- * @param orgSlug Organization Slug
- * @param projectId Project Slug
- * @param integration The organization integration to add
- */
-export function addIntegrationToProject(
- orgSlug: string,
- projectId: string,
- integration: Integration
-) {
- const endpoint = `/projects/${orgSlug}/${projectId}/integrations/${integration.id}/`;
- addLoadingMessage();
-
- return api.requestPromise(endpoint, {method: 'PUT'}).then(
- () => {
- addSuccessMessage(t('Enabled %s for %s', integration.name, projectId));
- },
- () => {
- addErrorMessage(t('Failed to enabled %s for %s', integration.name, projectId));
- }
- );
-}
-
-/**
- * Delete a respository
- *
- * @param client ApiClient
- * @param orgSlug Organization Slug
- * @param repositoryId Repository ID
- */
-export function deleteRepository(client: Client, orgSlug: string, repositoryId: string) {
- addLoadingMessage();
- const promise = client.requestPromise(
- `/organizations/${orgSlug}/repos/${repositoryId}/`,
- {
- method: 'DELETE',
- }
- );
- promise.then(
- () => clearIndicators(),
- () => addErrorMessage(t('Unable to delete repository.'))
- );
- return promise;
-}
-
/**
* Cancel the deletion of a respository
*
diff --git a/static/app/actionCreators/members.tsx b/static/app/actionCreators/members.tsx
index 61bf7ef0ff92ed..27b48e37389a9e 100644
--- a/static/app/actionCreators/members.tsx
+++ b/static/app/actionCreators/members.tsx
@@ -67,7 +67,7 @@ export function indexMembersByProject(members: Member[]): IndexedMembersByProjec
acc[project] = [];
}
if (member.user) {
- acc[project].push(member.user);
+ acc[project]!.push(member.user);
}
}
return acc;
diff --git a/static/app/actionCreators/modal.tsx b/static/app/actionCreators/modal.tsx
index f94ad494a63351..255d41d9330cbf 100644
--- a/static/app/actionCreators/modal.tsx
+++ b/static/app/actionCreators/modal.tsx
@@ -1,12 +1,15 @@
import type {Location} from 'history';
import type {ModalTypes} from 'sentry/components/globalModal';
+import type {AddToDashboardModalProps as CreateDashboardFromMetricsModalProps} from 'sentry/components/modals/createDashboardFromMetricsModal';
import type {CreateNewIntegrationModalOptions} from 'sentry/components/modals/createNewIntegrationModal';
import type {CreateReleaseIntegrationModalOptions} from 'sentry/components/modals/createReleaseIntegrationModal';
import type {DashboardWidgetQuerySelectorModalOptions} from 'sentry/components/modals/dashboardWidgetQuerySelectorModal';
+import type {ImportDashboardFromFileModalProps} from 'sentry/components/modals/importDashboardFromFileModal';
import type {InsightChartModalOptions} from 'sentry/components/modals/insightChartModal';
import type {InviteRow} from 'sentry/components/modals/inviteMembersModal/types';
import type {ReprocessEventModalOptions} from 'sentry/components/modals/reprocessEventModal';
+import type {AddToDashboardModalProps} from 'sentry/components/modals/widgetBuilder/addToDashboardModal';
import type {OverwriteWidgetModalProps} from 'sentry/components/modals/widgetBuilder/overwriteWidgetModal';
import type {WidgetViewerModalOptions} from 'sentry/components/modals/widgetViewerModal';
import type {Category} from 'sentry/components/platformPicker';
@@ -245,7 +248,7 @@ export async function openWidgetBuilderOverwriteModal(
});
}
-export async function openAddToDashboardModal(options) {
+export async function openAddToDashboardModal(options: AddToDashboardModalProps) {
const mod = await import('sentry/components/modals/widgetBuilder/addToDashboardModal');
const {default: Modal, modalCss} = mod;
@@ -255,7 +258,9 @@ export async function openAddToDashboardModal(options) {
});
}
-export async function openImportDashboardFromFileModal(options) {
+export async function openImportDashboardFromFileModal(
+ options: ImportDashboardFromFileModalProps
+) {
const mod = await import('sentry/components/modals/importDashboardFromFileModal');
const {default: Modal, modalCss} = mod;
@@ -265,7 +270,9 @@ export async function openImportDashboardFromFileModal(options) {
});
}
-export async function openCreateDashboardFromMetrics(options) {
+export async function openCreateDashboardFromMetrics(
+ options: CreateDashboardFromMetricsModalProps
+) {
const mod = await import('sentry/components/modals/createDashboardFromMetricsModal');
const {default: Modal, modalCss} = mod;
@@ -388,3 +395,13 @@ export async function openInsightChartModal(options: InsightChartModalOptions) {
openModal(deps => , {modalCss});
}
+
+export async function openAddTempestCredentialsModal(options: {
+ organization: Organization;
+ project: Project;
+}) {
+ const mod = await import('sentry/components/modals/addTempestCredentialsModal');
+ const {default: Modal} = mod;
+
+ openModal(deps => );
+}
diff --git a/static/app/actionCreators/monitors.tsx b/static/app/actionCreators/monitors.tsx
index 400dcaa98654c7..0a98c36450abad 100644
--- a/static/app/actionCreators/monitors.tsx
+++ b/static/app/actionCreators/monitors.tsx
@@ -70,7 +70,7 @@ export async function updateMonitor(
// If we are updating a single value in the monitor we can read the
// validation error for that key, otherwise fallback to the default error
const validationError =
- updateKeys.length === 1 ? respError.responseJSON?.[updateKeys[0]]?.[0] : undefined;
+ updateKeys.length === 1 ? respError.responseJSON?.[updateKeys[0]!]?.[0] : undefined;
logException(err);
addErrorMessage(validationError ?? t('Unable to update monitor.'));
diff --git a/static/app/actionCreators/organization.tsx b/static/app/actionCreators/organization.tsx
index e76f2c6e13a3a4..7d6845c1eaf6b1 100644
--- a/static/app/actionCreators/organization.tsx
+++ b/static/app/actionCreators/organization.tsx
@@ -15,9 +15,13 @@ import TeamStore from 'sentry/stores/teamStore';
import type {Organization, Team} from 'sentry/types/organization';
import type {Project} from 'sentry/types/project';
import FeatureFlagOverrides from 'sentry/utils/featureFlagOverrides';
-import FeatureObserver from 'sentry/utils/featureObserver';
+import {
+ addOrganizationFeaturesHandler,
+ buildSentryFeaturesHandler,
+} from 'sentry/utils/featureFlags';
import {getPreloadedDataPromise} from 'sentry/utils/getPreloadedData';
import parseLinkHeader from 'sentry/utils/parseLinkHeader';
+import type RequestError from 'sentry/utils/requestError/requestError';
async function fetchOrg(
api: Client,
@@ -42,8 +46,9 @@ async function fetchOrg(
}
FeatureFlagOverrides.singleton().loadOrg(org);
- FeatureObserver.singleton({}).observeOrganizationFlags({
+ addOrganizationFeaturesHandler({
organization: org,
+ handler: buildSentryFeaturesHandler('feature.organizations:'),
});
OrganizationStore.onUpdate(org, {replace: true});
@@ -139,7 +144,7 @@ export function fetchOrganizationDetails(
PageFiltersStore.onReset();
}
- const getErrorMessage = err => {
+ const getErrorMessage = (err: RequestError) => {
if (typeof err.responseJSON?.detail === 'string') {
return err.responseJSON?.detail;
}
diff --git a/static/app/actionCreators/organizations.spec.tsx b/static/app/actionCreators/organizations.spec.tsx
index deceb5f403dd28..822f1bdbaae26d 100644
--- a/static/app/actionCreators/organizations.spec.tsx
+++ b/static/app/actionCreators/organizations.spec.tsx
@@ -2,7 +2,6 @@ import {OrganizationFixture} from 'sentry-fixture/organization';
import {fetchOrganizations} from 'sentry/actionCreators/organizations';
import ConfigStore from 'sentry/stores/configStore';
-import {browserHistory} from 'sentry/utils/browserHistory';
describe('fetchOrganizations', function () {
const api = new MockApiClient();
@@ -76,6 +75,5 @@ describe('fetchOrganizations', function () {
expect(usMock).toHaveBeenCalledTimes(1);
expect(deMock).toHaveBeenCalledTimes(1);
expect(window.location.reload).not.toHaveBeenCalled();
- expect(browserHistory.replace).not.toHaveBeenCalled();
});
});
diff --git a/static/app/actionCreators/organizations.tsx b/static/app/actionCreators/organizations.tsx
index b26bec04438edf..5be559be701a4e 100644
--- a/static/app/actionCreators/organizations.tsx
+++ b/static/app/actionCreators/organizations.tsx
@@ -1,3 +1,5 @@
+import type {NavigateFunction} from 'react-router-dom';
+
import {addErrorMessage, addSuccessMessage} from 'sentry/actionCreators/indicator';
import {resetPageFilters} from 'sentry/actionCreators/pageFilters';
import type {Client} from 'sentry/api';
@@ -18,7 +20,10 @@ type RedirectRemainingOrganizationParams = {
* The organization slug
*/
orgId: string;
-
+ /**
+ * navigate function from useNavigate
+ */
+ navigate?: NavigateFunction;
/**
* Should remove org?
*/
@@ -32,6 +37,7 @@ type RedirectRemainingOrganizationParams = {
* Can optionally remove organization from organizations store.
*/
export function redirectToRemainingOrganization({
+ navigate,
orgId,
removeOrg,
}: RedirectRemainingOrganizationParams) {
@@ -40,12 +46,17 @@ export function redirectToRemainingOrganization({
org => org.status.id === 'active' && org.slug !== orgId
);
if (!allOrgs.length) {
- browserHistory.push('/organizations/new/');
+ if (navigate) {
+ navigate('/organizations/new/');
+ } else {
+ browserHistory.push('/organizations/new/');
+ }
+
return;
}
// Let's be smart and select the best org to redirect to
- const firstRemainingOrg = allOrgs[0];
+ const firstRemainingOrg = allOrgs[0]!;
const route = `/organizations/${firstRemainingOrg.slug}/issues/`;
if (USING_CUSTOMER_DOMAIN) {
@@ -54,7 +65,11 @@ export function redirectToRemainingOrganization({
return;
}
- browserHistory.push(route);
+ if (navigate) {
+ navigate(route);
+ } else {
+ browserHistory.push(route);
+ }
// Remove org from SidebarDropdown
if (removeOrg) {
diff --git a/static/app/actionCreators/pageFilters.spec.tsx b/static/app/actionCreators/pageFilters.spec.tsx
index 5fb56853b7a600..1c587d4c71e3dc 100644
--- a/static/app/actionCreators/pageFilters.spec.tsx
+++ b/static/app/actionCreators/pageFilters.spec.tsx
@@ -32,7 +32,7 @@ describe('PageFilters ActionCreators', function () {
});
describe('initializeUrlState', function () {
- let router;
+ let router: ReturnType;
const key = `global-selection:${organization.slug}`;
beforeEach(() => {
diff --git a/static/app/actionCreators/pageFilters.tsx b/static/app/actionCreators/pageFilters.tsx
index 54783e899fcbb2..ecd873d685661f 100644
--- a/static/app/actionCreators/pageFilters.tsx
+++ b/static/app/actionCreators/pageFilters.tsx
@@ -319,7 +319,7 @@ export function initializeUrlState({
if (projects && projects.length > 0) {
// If there is a list of projects from URL params, select first project
// from that list
- newProject = typeof projects === 'string' ? [Number(projects)] : [projects[0]];
+ newProject = typeof projects === 'string' ? [Number(projects)] : [projects[0]!];
} else {
// When we have finished loading the organization into the props, i.e.
// the organization slug is consistent with the URL param--Sentry will
diff --git a/static/app/actionCreators/projects.spec.tsx b/static/app/actionCreators/projects.spec.tsx
index 6b3f3e3cb79b1f..c7eef90e1537de 100644
--- a/static/app/actionCreators/projects.spec.tsx
+++ b/static/app/actionCreators/projects.spec.tsx
@@ -14,7 +14,7 @@ describe('Projects ActionCreators', function () {
expect(mock).not.toHaveBeenCalled();
_debouncedLoadStats(api, new Set([...Array(50)].map((_, i) => String(i))), {
- projectId: project.id,
+ projectId: project!.id,
orgId: organization.slug,
});
@@ -38,7 +38,7 @@ describe('Projects ActionCreators', function () {
expect(mock).not.toHaveBeenCalled();
_debouncedLoadStats(api, new Set(['1', '2', '3']), {
- projectId: project.id,
+ projectId: project!.id,
orgId: organization.slug,
query: {transactionStats: '1'},
});
diff --git a/static/app/actionCreators/repositories.spec.tsx b/static/app/actionCreators/repositories.spec.tsx
index 79531d204b8568..00d72c56a2c909 100644
--- a/static/app/actionCreators/repositories.spec.tsx
+++ b/static/app/actionCreators/repositories.spec.tsx
@@ -7,7 +7,7 @@ describe('RepositoryActionCreator', function () {
const api = new MockApiClient();
const mockData = [{id: '1'}];
- let mockResponse;
+ let mockResponse: jest.Mock;
beforeEach(() => {
MockApiClient.clearMockResponses();
@@ -50,15 +50,15 @@ describe('RepositoryActionCreator', function () {
expect(RepositoryStore.state.orgSlug).toEqual(orgSlug);
expect(RepositoryStore.state.repositories).toEqual(mockData);
- expect(RepositoryStore.state.repositoriesLoading).toEqual(false);
+ expect(RepositoryStore.state.repositoriesLoading).toBe(false);
});
it('short-circuits the JS event loop', () => {
- expect(RepositoryStore.state.repositoriesLoading).toEqual(undefined);
+ expect(RepositoryStore.state.repositoriesLoading).toBeUndefined();
getRepositories(api, {orgSlug}); // Fire Action.loadRepositories
expect(RepositoryStore.loadRepositories).toHaveBeenCalled();
// expect(RepositoryStore.loadRepositories).not.toHaveBeenCalled();
- expect(RepositoryStore.state.repositoriesLoading).toEqual(true); // Short-circuit
+ expect(RepositoryStore.state.repositoriesLoading).toBe(true); // Short-circuit
});
});
diff --git a/static/app/actionCreators/tags.tsx b/static/app/actionCreators/tags.tsx
index 7a4224ef8560f9..58ad9a03ac3690 100644
--- a/static/app/actionCreators/tags.tsx
+++ b/static/app/actionCreators/tags.tsx
@@ -248,7 +248,7 @@ export const makeFetchOrganizationTags = ({
if (end) {
query.end = end;
}
- return [`/organizations/${orgSlug}/tags/`, {query: query}];
+ return [`/organizations/${orgSlug}/tags/`, {query}];
};
export const useFetchOrganizationTags = (
diff --git a/static/app/api.spec.tsx b/static/app/api.spec.tsx
index 000801ab10375a..5886128d50cdd6 100644
--- a/static/app/api.spec.tsx
+++ b/static/app/api.spec.tsx
@@ -1,5 +1,6 @@
import {OrganizationFixture} from 'sentry-fixture/organization';
+import type {Client, ResponseMeta} from 'sentry/api';
import {isSimilarOrigin, Request, resolveHostname} from 'sentry/api';
import {PROJECT_MOVED} from 'sentry/constants/apiErrorCodes';
@@ -9,7 +10,7 @@ import OrganizationStore from './stores/organizationStore';
jest.unmock('sentry/api');
describe('api', function () {
- let api;
+ let api: Client;
beforeEach(function () {
api = new MockApiClient();
@@ -41,7 +42,9 @@ describe('api', function () {
it('does not call success callback if 302 was returned because of a project slug change', function () {
const successCb = jest.fn();
- api.activeRequests = {id: {alive: true}};
+ api.activeRequests = {
+ id: {alive: true, requestPromise: new Promise(() => null), cancel: jest.fn()},
+ };
api.wrapCallback(
'id',
successCb
@@ -60,9 +63,9 @@ describe('api', function () {
});
it('handles error callback', function () {
- jest.spyOn(api, 'wrapCallback').mockImplementation((_id, func) => func);
+ jest.spyOn(api, 'wrapCallback').mockImplementation((_id: string, func: any) => func);
const errorCb = jest.fn();
- const args = ['test', true, 1];
+ const args = ['test', true, 1] as unknown as [ResponseMeta, string, string];
api.handleRequestError(
{
id: 'test',
@@ -83,15 +86,18 @@ describe('api', function () {
path: 'test',
requestOptions: {},
},
- {},
- {}
+ {} as ResponseMeta,
+ '',
+ 'test'
)
).not.toThrow();
});
});
describe('resolveHostname', function () {
- let devUi, location, configstate;
+ let devUi: boolean | undefined;
+ let location: Location;
+ let configstate: ReturnType;
const controlPath = '/api/0/broadcasts/';
const regionPath = '/api/0/organizations/slug/issues/';
@@ -103,7 +109,7 @@ describe('resolveHostname', function () {
ConfigStore.loadInitialData({
...configstate,
- features: ['system:multi-region'],
+ features: new Set(['system:multi-region']),
links: {
organizationUrl: 'https://acme.sentry.io',
sentryUrl: 'https://sentry.io',
@@ -122,7 +128,7 @@ describe('resolveHostname', function () {
ConfigStore.loadInitialData({
...configstate,
// Remove the feature flag
- features: [],
+ features: new Set(),
});
let result = resolveHostname(controlPath);
diff --git a/static/app/bootstrap/exportGlobals.tsx b/static/app/bootstrap/exportGlobals.tsx
index 7b62c39158c546..69b9a121d592d9 100644
--- a/static/app/bootstrap/exportGlobals.tsx
+++ b/static/app/bootstrap/exportGlobals.tsx
@@ -1,18 +1,17 @@
import * as React from 'react';
-import {findDOMNode} from 'react-dom';
import {createRoot} from 'react-dom/client';
import * as Sentry from '@sentry/react';
import moment from 'moment-timezone';
import plugins from 'sentry/plugins';
-const globals = {
+const globals: Record = {
// The following globals are used in sentry-plugins webpack externals
// configuration.
React,
Sentry,
moment,
- ReactDOM: {findDOMNode, createRoot},
+ ReactDOM: {createRoot},
// django templates make use of these globals
SentryApp: {},
@@ -44,6 +43,11 @@ const SentryApp = {
};
globals.SentryApp = SentryApp;
-Object.keys(globals).forEach(name => (window[name] = globals[name]));
+Object.keys(globals).forEach(name => {
+ Object.defineProperty(window, name, {
+ value: globals[name],
+ writable: true,
+ });
+});
export {globals as exportedGlobals};
diff --git a/static/app/bootstrap/initializeSdk.spec.tsx b/static/app/bootstrap/initializeSdk.spec.tsx
index ee8e6497143b46..886da24167208e 100644
--- a/static/app/bootstrap/initializeSdk.spec.tsx
+++ b/static/app/bootstrap/initializeSdk.spec.tsx
@@ -9,7 +9,7 @@ import {
isFilteredRequestErrorEvent,
} from './initializeSdk';
-const ERROR_MAP = {
+const ERROR_MAP: Record = {
...origErrorMap,
// remove `UndefinedResponseBodyError` since we don't filter those
200: undefined,
diff --git a/static/app/bootstrap/initializeSdk.tsx b/static/app/bootstrap/initializeSdk.tsx
index 80ecff878cc382..222fda527d0715 100644
--- a/static/app/bootstrap/initializeSdk.tsx
+++ b/static/app/bootstrap/initializeSdk.tsx
@@ -1,7 +1,6 @@
// eslint-disable-next-line simple-import-sort/imports
import * as Sentry from '@sentry/react';
-import {_browserPerformanceTimeOriginMode} from '@sentry/utils';
-import type {Event} from '@sentry/types';
+import {type Event, _browserPerformanceTimeOriginMode} from '@sentry/core';
import {SENTRY_RELEASE_VERSION, SPA_DSN} from 'sentry/constants';
import type {Config} from 'sentry/types/system';
@@ -15,7 +14,6 @@ import {
useNavigationType,
} from 'react-router-dom';
import {useEffect} from 'react';
-import FeatureObserver from 'sentry/utils/featureObserver';
const SPA_MODE_ALLOW_URLS = [
'localhost',
@@ -62,17 +60,18 @@ function getSentryIntegrations() {
depth: 6,
}),
Sentry.reactRouterV6BrowserTracingIntegration({
- useEffect: useEffect,
- useLocation: useLocation,
- useNavigationType: useNavigationType,
- createRoutesFromChildren: createRoutesFromChildren,
- matchRoutes: matchRoutes,
+ useEffect,
+ useLocation,
+ useNavigationType,
+ createRoutesFromChildren,
+ matchRoutes,
}),
Sentry.browserProfilingIntegration(),
Sentry.thirdPartyErrorFilterIntegration({
filterKeys: ['sentry-spa'],
behaviour: 'apply-tag-if-contains-third-party-frames',
}),
+ Sentry.featureFlagsIntegration(),
];
return integrations;
@@ -180,15 +179,8 @@ export function initializeSdk(config: Config) {
handlePossibleUndefinedResponseBodyErrors(event);
addEndpointTagToRequestError(event);
-
lastEventId = event.event_id || hint.event_id;
- // attach feature flags to the event context
- if (event.contexts) {
- const flags = FeatureObserver.singleton({}).getFeatureFlags();
- event.contexts.flags = flags;
- }
-
return event;
},
});
@@ -225,7 +217,7 @@ export function initializeSdk(config: Config) {
images.push({
type: 'sourcemap',
code_file: filename,
- debug_id: debugIdMap[filename],
+ debug_id: debugIdMap[filename]!,
});
});
} catch (e) {
@@ -318,7 +310,7 @@ function handlePossibleUndefinedResponseBodyErrors(event: Event): void {
const causeErrorIsURBE = causeError?.type === 'UndefinedResponseBodyError';
if (mainErrorIsURBE || causeErrorIsURBE) {
- mainError.type = 'UndefinedResponseBodyError';
+ mainError!.type = 'UndefinedResponseBodyError';
event.tags = {...event.tags, undefinedResponseBody: true};
event.fingerprint = mainErrorIsURBE
? ['UndefinedResponseBodyError as main error']
@@ -327,7 +319,7 @@ function handlePossibleUndefinedResponseBodyErrors(event: Event): void {
}
export function addEndpointTagToRequestError(event: Event): void {
- const errorMessage = event.exception?.values?.[0].value || '';
+ const errorMessage = event.exception?.values?.[0]!.value || '';
// The capturing group here turns `GET /dogs/are/great 500` into just `GET /dogs/are/great`
const requestErrorRegex = new RegExp('^([A-Za-z]+ (/[^/]+)+/) \\d+$');
diff --git a/static/app/chartcuterie/config.tsx b/static/app/chartcuterie/config.tsx
index 7a659d73952ecf..af2a13061dd446 100644
--- a/static/app/chartcuterie/config.tsx
+++ b/static/app/chartcuterie/config.tsx
@@ -8,7 +8,6 @@
* into the configuration file loaded by the service.
*/
-// eslint-disable-next-line import/no-named-default
import {discoverCharts} from './discover';
import {metricAlertCharts} from './metricAlert';
import {performanceCharts} from './performance';
diff --git a/static/app/chartcuterie/discover.tsx b/static/app/chartcuterie/discover.tsx
index 50959c108139fa..ade3f7bd7c8f6f 100644
--- a/static/app/chartcuterie/discover.tsx
+++ b/static/app/chartcuterie/discover.tsx
@@ -60,10 +60,12 @@ discoverCharts.push({
AreaSeries({
name: s.key,
stack: 'area',
- data: s.data.map(([timestamp, countsForTimestamp]) => [
- timestamp * 1000,
- countsForTimestamp.reduce((acc, {count}) => acc + count, 0),
- ]),
+ data: s.data.map(
+ ([timestamp, countsForTimestamp]: [number, {count: number}[]]) => [
+ timestamp * 1000,
+ countsForTimestamp.reduce((acc, {count}) => acc + count, 0),
+ ]
+ ),
lineStyle: {color: color?.[i], opacity: 1, width: 0.4},
areaStyle: {color: color?.[i], opacity: 1},
})
@@ -121,12 +123,14 @@ discoverCharts.push({
BarSeries({
name: s.key,
stack: 'area',
- data: s.data.map(([timestamp, countsForTimestamp]) => ({
- value: [
- timestamp * 1000,
- countsForTimestamp.reduce((acc, {count}) => acc + count, 0),
- ],
- })),
+ data: s.data.map(
+ ([timestamp, countsForTimestamp]: [number, {count: number}[]]) => ({
+ value: [
+ timestamp * 1000,
+ countsForTimestamp.reduce((acc, {count}) => acc + count, 0),
+ ],
+ })
+ ),
itemStyle: {color: color?.[i], opacity: 1},
})
);
@@ -179,10 +183,12 @@ discoverCharts.push({
.map((topSeries, i) =>
AreaSeries({
stack: 'area',
- data: topSeries.data.map(([timestamp, countsForTimestamp]) => [
- timestamp * 1000,
- countsForTimestamp.reduce((acc, {count}) => acc + count, 0),
- ]),
+ data: topSeries.data.map(
+ ([timestamp, countsForTimestamp]: [number, {count: number}[]]) => [
+ timestamp * 1000,
+ countsForTimestamp.reduce((acc, {count}) => acc + count, 0),
+ ]
+ ),
lineStyle: {color: color?.[i], opacity: 1, width: 0.4},
areaStyle: {color: color?.[i], opacity: 1},
})
@@ -235,10 +241,12 @@ discoverCharts.push({
.sort((a, b) => (a.order ?? 0) - (b.order ?? 0))
.map((topSeries, i) =>
LineSeries({
- data: topSeries.data.map(([timestamp, countsForTimestamp]) => [
- timestamp * 1000,
- countsForTimestamp.reduce((acc, {count}) => acc + count, 0),
- ]),
+ data: topSeries.data.map(
+ ([timestamp, countsForTimestamp]: [number, {count: number}[]]) => [
+ timestamp * 1000,
+ countsForTimestamp.reduce((acc, {count}) => acc + count, 0),
+ ]
+ ),
lineStyle: {color: color?.[i], opacity: 1},
itemStyle: {color: color?.[i]},
})
@@ -292,10 +300,12 @@ discoverCharts.push({
.map((topSeries, i) =>
BarSeries({
stack: 'area',
- data: topSeries.data.map(([timestamp, countsForTimestamp]) => [
- timestamp * 1000,
- countsForTimestamp.reduce((acc, {count}) => acc + count, 0),
- ]),
+ data: topSeries.data.map(
+ ([timestamp, countsForTimestamp]: [number, {count: number}[]]) => [
+ timestamp * 1000,
+ countsForTimestamp.reduce((acc, {count}) => acc + count, 0),
+ ]
+ ),
itemStyle: {color: color?.[i], opacity: 1},
})
);
@@ -336,7 +346,7 @@ discoverCharts.push({
const previousPeriod = LineSeries({
name: t('previous %s', data.seriesName),
data: previous.map(([_, countsForTimestamp], i) => [
- current[i][0] * 1000,
+ current[i]![0] * 1000,
countsForTimestamp.reduce((acc, {count}) => acc + count, 0),
]),
lineStyle: {color: theme.gray200, type: 'dotted'},
@@ -372,7 +382,7 @@ discoverCharts.push({
stack: 'area',
data: s.data
.slice(dataMiddleIndex)
- .map(([timestamp, countsForTimestamp]) => [
+ .map(([timestamp, countsForTimestamp]: [number, {count: number}[]]) => [
timestamp * 1000,
countsForTimestamp.reduce((acc, {count}) => acc + count, 0),
]),
@@ -384,10 +394,12 @@ discoverCharts.push({
LineSeries({
name: t('previous %s', s.key),
stack: 'previous',
- data: previous.map(([_, countsForTimestamp], index) => [
- current[index][0] * 1000,
- countsForTimestamp.reduce((acc, {count}) => acc + count, 0),
- ]),
+ data: previous.map(
+ ([_, countsForTimestamp]: [number, {count: number}[]], index: number) => [
+ current[index][0] * 1000,
+ countsForTimestamp.reduce((acc, {count}) => acc + count, 0),
+ ]
+ ),
lineStyle: {color: previousPeriodColor?.[i], type: 'dotted'},
itemStyle: {color: previousPeriodColor?.[i]},
})
diff --git a/static/app/components/acl/feature.spec.tsx b/static/app/components/acl/feature.spec.tsx
index e86b80fb1269a7..35fe469956882f 100644
--- a/static/app/components/acl/feature.spec.tsx
+++ b/static/app/components/acl/feature.spec.tsx
@@ -285,7 +285,7 @@ describe('Feature', function () {
});
describe('using HookStore for renderDisabled', function () {
- let hookFn;
+ let hookFn: jest.Mock;
beforeEach(function () {
hookFn = jest.fn(() => null);
diff --git a/static/app/components/acl/feature.tsx b/static/app/components/acl/feature.tsx
index 83547b2d78acbe..77a3f5b91061a0 100644
--- a/static/app/components/acl/feature.tsx
+++ b/static/app/components/acl/feature.tsx
@@ -141,12 +141,12 @@ class Feature extends Component {
const shouldMatchOnlyProject = feature.match(/^projects:(.+)/);
if (shouldMatchOnlyProject) {
- return project.includes(shouldMatchOnlyProject[1]);
+ return project.includes(shouldMatchOnlyProject[1]!);
}
const shouldMatchOnlyOrg = feature.match(/^organizations:(.+)/);
if (shouldMatchOnlyOrg) {
- return organization.includes(shouldMatchOnlyOrg[1]);
+ return organization.includes(shouldMatchOnlyOrg[1]!);
}
// default, check all feature arrays
@@ -186,7 +186,7 @@ class Feature extends Component {
const hooks = HookStore.get(hookName);
if (hooks.length > 0) {
- customDisabledRender = hooks[0];
+ customDisabledRender = hooks[0]!;
}
}
const renderProps = {
diff --git a/static/app/components/acl/featureDisabledModal.spec.tsx b/static/app/components/acl/featureDisabledModal.spec.tsx
index 18e40d393f9f00..d283812ee36e5a 100644
--- a/static/app/components/acl/featureDisabledModal.spec.tsx
+++ b/static/app/components/acl/featureDisabledModal.spec.tsx
@@ -1,4 +1,4 @@
-import type {ComponentProps} from 'react';
+import type {ComponentProps, PropsWithChildren} from 'react';
import styled from '@emotion/styled';
import {render, screen} from 'sentry-test/reactTestingLibrary';
@@ -8,7 +8,7 @@ import ModalStore from 'sentry/stores/modalStore';
describe('FeatureTourModal', function () {
const onCloseModal = jest.fn();
- const styledWrapper = styled(c => c.children);
+ const styledWrapper = styled((c: PropsWithChildren) => c.children);
const renderComponent = (
props: Partial> = {}
) =>
diff --git a/static/app/components/actions/archive.spec.tsx b/static/app/components/actions/archive.spec.tsx
index 742681dfb0ea90..9e03ae6198d901 100644
--- a/static/app/components/actions/archive.spec.tsx
+++ b/static/app/components/actions/archive.spec.tsx
@@ -82,10 +82,10 @@ describe('ArchiveActions', () => {
render( );
await userEvent.click(screen.getByRole('button', {name: 'Archive options'}));
expect(
- screen.queryByRole('menuitemradio', {name: 'Until this occurs again\u2026'})
+ screen.getByRole('menuitemradio', {name: 'Until this occurs again\u2026'})
).toBeInTheDocument();
expect(
- screen.queryByRole('menuitemradio', {
+ screen.getByRole('menuitemradio', {
name: 'Until this affects an additional\u2026',
})
).toBeInTheDocument();
diff --git a/static/app/components/activity/note/input.tsx b/static/app/components/activity/note/input.tsx
index 5e64720754d9ce..a1ac2f35ffcc8f 100644
--- a/static/app/components/activity/note/input.tsx
+++ b/static/app/components/activity/note/input.tsx
@@ -132,7 +132,7 @@ function NoteInput({
);
const handleChange: MentionsInputProps['onChange'] = useCallback(
- e => {
+ (e: MentionChangeEvent) => {
setValue(e.target.value);
onChange?.(e, {updating: existingItem});
},
@@ -140,7 +140,7 @@ function NoteInput({
);
const handleKeyDown: MentionsInputProps['onKeyDown'] = useCallback(
- e => {
+ (e: React.KeyboardEvent) => {
// Auto submit the form on [meta,ctrl] + Enter
if (e.key === 'Enter' && (e.metaKey || e.ctrlKey) && canSubmit) {
submitForm();
diff --git a/static/app/components/activity/note/inputWithStorage.tsx b/static/app/components/activity/note/inputWithStorage.tsx
index cef5b960a8eb1c..284b8b2493dab5 100644
--- a/static/app/components/activity/note/inputWithStorage.tsx
+++ b/static/app/components/activity/note/inputWithStorage.tsx
@@ -129,7 +129,7 @@ function NoteInputWithStorage({
}
// Remove `itemKey` from stored object and save to storage
- // eslint-disable-next-line no-unused-vars
+
const {[itemKey]: _oldItem, ...newStorageObj} = storageObj;
saveToStorage(storageKey, newStorageObj);
},
diff --git a/static/app/components/alerts/notificationBar.tsx b/static/app/components/alerts/notificationBar.tsx
deleted file mode 100644
index ab7dc6666cd47b..00000000000000
--- a/static/app/components/alerts/notificationBar.tsx
+++ /dev/null
@@ -1,23 +0,0 @@
-import styled from '@emotion/styled';
-
-import {IconInfo} from 'sentry/icons';
-import {space} from 'sentry/styles/space';
-
-const StyledNotificationBarIconInfo = styled(IconInfo)`
- margin-right: ${space(1)};
- color: ${p => p.theme.alert.info.color};
-`;
-
-export const NotificationBar = styled('div')`
- display: flex;
- align-items: center;
- color: ${p => p.theme.textColor};
- background-color: ${p => p.theme.alert.info.backgroundLight};
- border-bottom: 1px solid ${p => p.theme.alert.info.border};
- padding: ${space(1.5)};
- font-size: 14px;
- line-height: normal;
- ${StyledNotificationBarIconInfo} {
- color: ${p => p.theme.alert.info.color};
- }
-`;
diff --git a/static/app/components/alerts/snoozeAlert.tsx b/static/app/components/alerts/snoozeAlert.tsx
index 890b48bfab5515..046fb3e43529b1 100644
--- a/static/app/components/alerts/snoozeAlert.tsx
+++ b/static/app/components/alerts/snoozeAlert.tsx
@@ -9,9 +9,9 @@ import {DropdownMenu} from 'sentry/components/dropdownMenu';
import {IconChevron, IconMute, IconSound} from 'sentry/icons';
import {t} from 'sentry/locale';
import {RuleActionsCategories} from 'sentry/types/alerts';
-import {browserHistory} from 'sentry/utils/browserHistory';
import useApi from 'sentry/utils/useApi';
import {useLocation} from 'sentry/utils/useLocation';
+import {useNavigate} from 'sentry/utils/useNavigate';
import useOrganization from 'sentry/utils/useOrganization';
type Props = {
@@ -42,6 +42,7 @@ function SnoozeAlert({
const organization = useOrganization();
const api = useApi();
const location = useLocation();
+ const navigate = useNavigate();
const [disabled, setDisabled] = useState(false);
@@ -62,10 +63,13 @@ function SnoozeAlert({
);
if (autoMute) {
- browserHistory.replace({
- pathname: location.pathname,
- query: {...location.query, mute: undefined},
- });
+ navigate(
+ {
+ pathname: location.pathname,
+ query: {...location.query, mute: undefined},
+ },
+ {replace: true}
+ );
}
setDisabled(false);
@@ -87,6 +91,7 @@ function SnoozeAlert({
},
[
api,
+ navigate,
isSnoozed,
location.pathname,
location.query,
diff --git a/static/app/components/analyticsArea.spec.tsx b/static/app/components/analyticsArea.spec.tsx
index f44d5163a4c73a..061861bde7f180 100644
--- a/static/app/components/analyticsArea.spec.tsx
+++ b/static/app/components/analyticsArea.spec.tsx
@@ -15,7 +15,7 @@ function TestButton({org}: {org: Organization}) {
onClick={() => {
analytics.trackAnalytics('button-clicked', {
organization: org,
- area: area,
+ area,
});
}}
/>
@@ -41,7 +41,7 @@ describe('AnalyticsAreaProvider', function () {
await userEvent.click(button);
expect(analyticsSpy).toHaveBeenCalledWith('button-clicked', {
- organization: organization,
+ organization,
area: 'feedback.details.activity',
});
});
@@ -61,7 +61,7 @@ describe('AnalyticsAreaProvider', function () {
await userEvent.click(button);
expect(analyticsSpy).toHaveBeenCalledWith('button-clicked', {
- organization: organization,
+ organization,
area: 'my-modal',
});
});
diff --git a/static/app/components/arithmeticInput/parser.spec.tsx b/static/app/components/arithmeticInput/parser.spec.tsx
index d20321ae27ac3b..e58d1ae75350c7 100644
--- a/static/app/components/arithmeticInput/parser.spec.tsx
+++ b/static/app/components/arithmeticInput/parser.spec.tsx
@@ -2,17 +2,17 @@ import {Operation, parseArithmetic} from 'sentry/components/arithmeticInput/pars
describe('arithmeticInput/parser', function () {
it('errors on too many operators', () => {
- expect(parseArithmetic('1+1+1+1+1+1+1+1+1+1+1+1').error).toEqual(
+ expect(parseArithmetic('1+1+1+1+1+1+1+1+1+1+1+1').error).toBe(
'Maximum operators exceeded'
);
});
it('errors on divide by 0', () => {
- expect(parseArithmetic('1/0').error).toEqual('Division by 0 is not allowed');
+ expect(parseArithmetic('1/0').error).toBe('Division by 0 is not allowed');
});
it('handles one term', () => {
- expect(parseArithmetic('1').result).toStrictEqual('1');
+ expect(parseArithmetic('1').result).toBe('1');
});
it('handles some addition', () => {
diff --git a/static/app/components/arithmeticInput/parser.tsx b/static/app/components/arithmeticInput/parser.tsx
index 1c22966f4837ba..c73635d051f99f 100644
--- a/static/app/components/arithmeticInput/parser.tsx
+++ b/static/app/components/arithmeticInput/parser.tsx
@@ -54,7 +54,7 @@ export class TokenConverter {
tokenTerm = (maybeFactor: Expression, remainingAdds: Array): Expression => {
if (remainingAdds.length > 0) {
- remainingAdds[0].lhs = maybeFactor;
+ remainingAdds[0]!.lhs = maybeFactor;
return flatten(remainingAdds);
}
return maybeFactor;
@@ -75,7 +75,7 @@ export class TokenConverter {
};
tokenFactor = (primary: Expression, remaining: Array): Operation => {
- remaining[0].lhs = primary;
+ remaining[0]!.lhs = primary;
return flatten(remaining);
};
diff --git a/static/app/components/assigneeBadge.stories.tsx b/static/app/components/assigneeBadge.stories.tsx
index 3786ba961aaf48..d087d6e37455f4 100644
--- a/static/app/components/assigneeBadge.stories.tsx
+++ b/static/app/components/assigneeBadge.stories.tsx
@@ -42,7 +42,7 @@ export default storyBook('AssigneeBadge', story => {
const [chevron2Toggle, setChevron2Toggle] = useState<'up' | 'down'>('down');
const team: Team = teams.length
- ? teams[0]
+ ? teams[0]!
: {
id: '1',
slug: 'team-slug',
diff --git a/static/app/components/assigneeSelectorDropdown.spec.tsx b/static/app/components/assigneeSelectorDropdown.spec.tsx
index 675aef098928c5..8c28fb3c99255b 100644
--- a/static/app/components/assigneeSelectorDropdown.spec.tsx
+++ b/static/app/components/assigneeSelectorDropdown.spec.tsx
@@ -572,7 +572,7 @@ describe('AssigneeSelectorDropdown', () => {
// Suggested assignee initials
expect(options[0]).toHaveTextContent('AB');
- await userEvent.click(options[0]);
+ await userEvent.click(options[0]!);
await waitFor(() =>
expect(assignGroup2Mock).toHaveBeenCalledWith(
diff --git a/static/app/components/assigneeSelectorDropdown.tsx b/static/app/components/assigneeSelectorDropdown.tsx
index 62ccdd19f9a339..0615ae973d26e7 100644
--- a/static/app/components/assigneeSelectorDropdown.tsx
+++ b/static/app/components/assigneeSelectorDropdown.tsx
@@ -155,6 +155,7 @@ export function AssigneeAvatar({
}
if (suggestedActors.length > 0) {
+ const actor = suggestedActors[0]!;
return (
{tct('Suggestion: [name]', {
- name:
- suggestedActors[0].type === 'team'
- ? `#${suggestedActors[0].name}`
- : suggestedActors[0].name,
+ name: actor.type === 'team' ? `#${actor.name}` : actor.name,
})}
{suggestedActors.length > 1 &&
tn(' + %s other', ' + %s others', suggestedActors.length - 1)}
-
- {suggestedReasons[suggestedActors[0].suggestedReason]}
-
+ {suggestedReasons[actor.suggestedReason]}
}
/>
@@ -265,7 +261,10 @@ export default function AssigneeSelectorDropdown({
const uniqueSuggestions = uniqBy(suggestedOwners, owner => owner.owner);
return uniqueSuggestions
.map(suggestion => {
- const [suggestionType, suggestionId] = suggestion.owner.split(':');
+ const [suggestionType, suggestionId] = suggestion.owner.split(':') as [
+ string,
+ string,
+ ];
const suggestedReasonText = suggestedReasonTable[suggestion.type];
if (suggestionType === 'user') {
const member = currentMemberList.find(user => user.id === suggestionId);
@@ -322,7 +321,7 @@ export default function AssigneeSelectorDropdown({
}
// See makeMemberOption and makeTeamOption for how the value is formatted
const type = selectedOption.value.startsWith('user:') ? 'user' : 'team';
- const assigneeId = selectedOption.value.split(':')[1];
+ const assigneeId = selectedOption.value.split(':')[1]!;
let assignee: User | Actor;
if (type === 'user') {
@@ -344,10 +343,10 @@ export default function AssigneeSelectorDropdown({
actor => actor.type === type && actor.id === assignee.id
);
onAssign({
- assignee: assignee,
+ assignee,
id: assigneeId,
- type: type,
- suggestedAssignee: suggestedAssignee,
+ type,
+ suggestedAssignee,
});
}
};
diff --git a/static/app/components/assistant/guideAnchor.tsx b/static/app/components/assistant/guideAnchor.tsx
index 81ac2c234cfc8c..efbd244f529460 100644
--- a/static/app/components/assistant/guideAnchor.tsx
+++ b/static/app/components/assistant/guideAnchor.tsx
@@ -1,4 +1,4 @@
-import {Component, createRef, Fragment, useEffect} from 'react';
+import {Component, Fragment, useEffect, useRef} from 'react';
import styled from '@emotion/styled';
import * as Sentry from '@sentry/react';
import type {Query} from 'history';
@@ -44,7 +44,7 @@ type Props = {
};
function ScrollToGuide({children}: {children: React.ReactNode}) {
- const containerElement = createRef();
+ const containerElement = useRef(null);
useEffect(() => {
if (containerElement.current) {
@@ -155,7 +155,7 @@ class BaseGuideAnchor extends Component {
const totalStepCount = currentGuide.steps.length;
const currentStepCount = step + 1;
- const currentStep = currentGuide.steps[step];
+ const currentStep = currentGuide.steps[step]!;
const lastStep = currentStepCount === totalStepCount;
const hasManySteps = totalStepCount > 1;
diff --git a/static/app/components/autoComplete.spec.tsx b/static/app/components/autoComplete.spec.tsx
index e85f910f870f6a..fb87a2fe581697 100644
--- a/static/app/components/autoComplete.spec.tsx
+++ b/static/app/components/autoComplete.spec.tsx
@@ -23,7 +23,7 @@ const items = [
* "controlled" props where does not handle state
*/
describe('AutoComplete', function () {
- let input;
+ let input: HTMLInputElement;
let autoCompleteState: any[] = [];
const mocks = {
onSelect: jest.fn(),
@@ -36,12 +36,30 @@ describe('AutoComplete', function () {
autoCompleteState = [];
});
- function List({registerItemCount, itemCount, ...props}) {
+ function List({
+ registerItemCount,
+ itemCount,
+ ...props
+ }: {
+ children: React.ReactNode;
+ itemCount: number;
+ registerItemCount: (count?: number) => void;
+ }) {
useEffect(() => void registerItemCount(itemCount), [itemCount, registerItemCount]);
return ;
}
- function Item({registerVisibleItem, item, index, ...props}) {
+ function Item({
+ registerVisibleItem,
+ item,
+ index,
+ ...props
+ }: {
+ children: React.ReactNode;
+ index: number;
+ item: {name?: string};
+ registerVisibleItem: (index: number, item: any) => () => void;
+ }) {
useEffect(() => registerVisibleItem(index, item), [registerVisibleItem, index, item]);
return ;
}
@@ -198,7 +216,7 @@ describe('AutoComplete', function () {
expect(screen.getByTestId('test-autocomplete')).toBeInTheDocument();
expect(screen.getAllByRole('option')).toHaveLength(3);
- fireEvent.click(screen.getByText(items[1].name));
+ fireEvent.click(screen.getByText(items[1]!.name));
expect(mocks.onSelect).toHaveBeenCalledWith(
items[1],
expect.objectContaining({inputValue: '', highlightedIndex: 0}),
@@ -401,7 +419,7 @@ describe('AutoComplete', function () {
createWrapper({isOpen: true});
expect(screen.getAllByRole('option')).toHaveLength(3);
- fireEvent.click(screen.getByText(items[1].name));
+ fireEvent.click(screen.getByText(items[1]!.name));
expect(mocks.onSelect).toHaveBeenCalledWith(
items[1],
expect.objectContaining({inputValue: '', highlightedIndex: 0}),
diff --git a/static/app/components/autoplayVideo.spec.tsx b/static/app/components/autoplayVideo.spec.tsx
index 04c3411c074b64..38a9702457ffec 100644
--- a/static/app/components/autoplayVideo.spec.tsx
+++ b/static/app/components/autoplayVideo.spec.tsx
@@ -1,4 +1,3 @@
-// eslint-disable-next-line no-restricted-imports
import * as React from 'react';
import {render, screen} from 'sentry-test/reactTestingLibrary';
diff --git a/static/app/components/avatar/avatarList.spec.tsx b/static/app/components/avatar/avatarList.spec.tsx
index 9d8046e23da216..3e6adb97006c43 100644
--- a/static/app/components/avatar/avatarList.spec.tsx
+++ b/static/app/components/avatar/avatarList.spec.tsx
@@ -42,12 +42,12 @@ describe('AvatarList', () => {
];
renderComponent({users});
- expect(screen.getByText(users[0].name.charAt(0))).toBeInTheDocument();
- expect(screen.getByText(users[1].name.charAt(0))).toBeInTheDocument();
- expect(screen.getByText(users[2].name.charAt(0))).toBeInTheDocument();
- expect(screen.getByText(users[3].name.charAt(0))).toBeInTheDocument();
- expect(screen.getByText(users[4].name.charAt(0))).toBeInTheDocument();
- expect(screen.getByText(users[5].name.charAt(0))).toBeInTheDocument();
+ expect(screen.getByText(users[0]!.name.charAt(0))).toBeInTheDocument();
+ expect(screen.getByText(users[1]!.name.charAt(0))).toBeInTheDocument();
+ expect(screen.getByText(users[2]!.name.charAt(0))).toBeInTheDocument();
+ expect(screen.getByText(users[3]!.name.charAt(0))).toBeInTheDocument();
+ expect(screen.getByText(users[4]!.name.charAt(0))).toBeInTheDocument();
+ expect(screen.getByText(users[5]!.name.charAt(0))).toBeInTheDocument();
expect(screen.queryByTestId('avatarList-collapsedavatars')).not.toBeInTheDocument();
});
@@ -63,12 +63,12 @@ describe('AvatarList', () => {
];
renderComponent({users});
- expect(screen.getByText(users[0].name.charAt(0))).toBeInTheDocument();
- expect(screen.getByText(users[1].name.charAt(0))).toBeInTheDocument();
- expect(screen.getByText(users[2].name.charAt(0))).toBeInTheDocument();
- expect(screen.getByText(users[3].name.charAt(0))).toBeInTheDocument();
- expect(screen.getByText(users[4].name.charAt(0))).toBeInTheDocument();
- expect(screen.queryByText(users[5].name.charAt(0))).not.toBeInTheDocument();
+ expect(screen.getByText(users[0]!.name.charAt(0))).toBeInTheDocument();
+ expect(screen.getByText(users[1]!.name.charAt(0))).toBeInTheDocument();
+ expect(screen.getByText(users[2]!.name.charAt(0))).toBeInTheDocument();
+ expect(screen.getByText(users[3]!.name.charAt(0))).toBeInTheDocument();
+ expect(screen.getByText(users[4]!.name.charAt(0))).toBeInTheDocument();
+ expect(screen.queryByText(users[5]!.name.charAt(0))).not.toBeInTheDocument();
expect(screen.getByTestId('avatarList-collapsedavatars')).toBeInTheDocument();
});
diff --git a/static/app/components/avatar/avatarList.tsx b/static/app/components/avatar/avatarList.tsx
index df9932016668b3..e6234e62fd611f 100644
--- a/static/app/components/avatar/avatarList.tsx
+++ b/static/app/components/avatar/avatarList.tsx
@@ -69,9 +69,9 @@ function AvatarList({
if (numCollapsedAvatars === 1) {
if (visibleTeamAvatars.length < teams.length) {
- visibleTeamAvatars.unshift(teams[teams.length - 1]);
+ visibleTeamAvatars.unshift(teams[teams.length - 1]!);
} else if (visibleUserAvatars.length < users.length) {
- visibleUserAvatars.unshift(users[users.length - 1]);
+ visibleUserAvatars.unshift(users[users.length - 1]!);
}
numCollapsedAvatars = 0;
}
diff --git a/static/app/components/avatar/index.spec.tsx b/static/app/components/avatar/index.spec.tsx
index 4959b79988bfad..e26f04fe0ccb10 100644
--- a/static/app/components/avatar/index.spec.tsx
+++ b/static/app/components/avatar/index.spec.tsx
@@ -293,7 +293,7 @@ describe('Avatar', function () {
avatar2.unmount();
// avatarType of `default`
- sentryApp.avatars![0].avatarType = 'default';
+ sentryApp.avatars![0]!.avatarType = 'default';
render( );
expect(screen.getByTestId('default-sentry-app-avatar')).toBeInTheDocument();
});
diff --git a/static/app/components/avatar/suggestedAvatarStack.tsx b/static/app/components/avatar/suggestedAvatarStack.tsx
index ee03cdb641d307..cd869de11fd4ff 100644
--- a/static/app/components/avatar/suggestedAvatarStack.tsx
+++ b/static/app/components/avatar/suggestedAvatarStack.tsx
@@ -29,7 +29,7 @@ function SuggestedAvatarStack({
{suggestedOwners.slice(0, numAvatars - 1).map((owner, i) => (
))}
{
// Normalize diff across dimensions so that negative diffs are always making
// the cropper smaller and positive ones are making the cropper larger
- const helpers = {
+ const helpers: Record number> = {
getDiffNE,
getDiffNW,
getDiffSE,
getDiffSW,
} as const;
- const diff = helpers['getDiff' + resizeDirection!.toUpperCase()](yDiff, xDiff);
+ const diff = helpers['getDiff' + resizeDirection!.toUpperCase()]!(yDiff, xDiff);
let height = container.clientHeight - oldDimensions.top;
let width = container.clientWidth - oldDimensions.left;
diff --git a/static/app/components/badge/featureBadge.tsx b/static/app/components/badge/featureBadge.tsx
index 1d400ef746e844..f88860a620c577 100644
--- a/static/app/components/badge/featureBadge.tsx
+++ b/static/app/components/badge/featureBadge.tsx
@@ -1,8 +1,8 @@
import {Fragment, type ReactNode} from 'react';
import {useTheme} from '@emotion/react';
import styled from '@emotion/styled';
+import type {SeverityLevel} from '@sentry/core';
import {captureException, withScope} from '@sentry/react';
-import type {SeverityLevel} from '@sentry/types';
import Badge from 'sentry/components/badge/badge';
import CircleIndicator from 'sentry/components/circleIndicator';
diff --git a/static/app/components/breadcrumbs.tsx b/static/app/components/breadcrumbs.tsx
index 86b691c607520f..a8c62cc13f4428 100644
--- a/static/app/components/breadcrumbs.tsx
+++ b/static/app/components/breadcrumbs.tsx
@@ -87,7 +87,7 @@ export function Breadcrumbs({crumbs, linkLastItem = false, ...props}: Props) {
}
if (!linkLastItem) {
- const lastCrumb = crumbs[crumbs.length - 1];
+ const lastCrumb = crumbs[crumbs.length - 1]!;
if (!isCrumbDropdown(lastCrumb)) {
lastCrumb.to = null;
}
diff --git a/static/app/components/calendar/calendarStylesWrapper.tsx b/static/app/components/calendar/calendarStylesWrapper.tsx
index a6a4d117171fbf..2be8a9b7f7b583 100644
--- a/static/app/components/calendar/calendarStylesWrapper.tsx
+++ b/static/app/components/calendar/calendarStylesWrapper.tsx
@@ -244,6 +244,10 @@ const CalendarStylesWrapper = styled('div')`
border-left-color: ${p => p.theme.textColor};
margin: 0;
}
+
+ .rdrDayPassive {
+ visibility: hidden;
+ }
`;
export default CalendarStylesWrapper;
diff --git a/static/app/components/carousel.spec.tsx b/static/app/components/carousel.spec.tsx
index b50f3848d3a8b2..3d93ffcb9e7d6b 100644
--- a/static/app/components/carousel.spec.tsx
+++ b/static/app/components/carousel.spec.tsx
@@ -67,9 +67,9 @@ describe('Carousel', function () {
expect(screen.queryByRole('button', {name: 'Scroll left'})).not.toBeInTheDocument();
// Test scroll into view, the 2nd element should have its 'scrollIntoView' called
- elements[1].scrollIntoView = jest.fn();
+ elements[1]!.scrollIntoView = jest.fn();
await userEvent.click(rightButton);
- expect(elements[1].scrollIntoView).toHaveBeenCalled();
+ expect(elements[1]!.scrollIntoView).toHaveBeenCalled();
});
it('shows left arrow when elements exist to the left', async function () {
@@ -100,9 +100,9 @@ describe('Carousel', function () {
expect(screen.queryByRole('button', {name: 'Scroll right'})).not.toBeInTheDocument();
// Test scroll into view, the 1st element should have its 'scrollIntoView' called
- elements[0].scrollIntoView = jest.fn();
+ elements[0]!.scrollIntoView = jest.fn();
await userEvent.click(leftButton);
- expect(elements[0].scrollIntoView).toHaveBeenCalled();
+ expect(elements[0]!.scrollIntoView).toHaveBeenCalled();
});
it('skips an element when it is past the visibleRatio', async function () {
@@ -133,8 +133,8 @@ describe('Carousel', function () {
expect(screen.queryByRole('button', {name: 'Scroll left'})).not.toBeInTheDocument();
// Test scroll into view, the 2nd element should have its 'scrollIntoView' called
- elements[2].scrollIntoView = jest.fn();
+ elements[2]!.scrollIntoView = jest.fn();
await userEvent.click(rightButton);
- expect(elements[2].scrollIntoView).toHaveBeenCalled();
+ expect(elements[2]!.scrollIntoView).toHaveBeenCalled();
});
});
diff --git a/static/app/components/carousel.tsx b/static/app/components/carousel.tsx
index bf699cc842a8ca..25accfb344c045 100644
--- a/static/app/components/carousel.tsx
+++ b/static/app/components/carousel.tsx
@@ -36,7 +36,7 @@ function Carousel({children, visibleRatio = 0.8}: CarouselProps) {
const scrollLeft = useCallback(
() =>
- childrenEls[visibility.findIndex(Boolean) - 1].scrollIntoView({
+ childrenEls[visibility.findIndex(Boolean) - 1]!.scrollIntoView({
behavior: 'smooth',
block: 'nearest',
inline: 'start',
@@ -46,7 +46,7 @@ function Carousel({children, visibleRatio = 0.8}: CarouselProps) {
const scrollRight = useCallback(
() =>
- childrenEls[visibility.findLastIndex(Boolean) + 1].scrollIntoView({
+ childrenEls[visibility.findLastIndex(Boolean) + 1]!.scrollIntoView({
behavior: 'smooth',
block: 'nearest',
inline: 'end',
diff --git a/static/app/components/charts/barChartZoom.tsx b/static/app/components/charts/barChartZoom.tsx
index a02995129bbd13..c374de959a5d94 100644
--- a/static/app/components/charts/barChartZoom.tsx
+++ b/static/app/components/charts/barChartZoom.tsx
@@ -3,7 +3,11 @@ import type {Location} from 'history';
import DataZoomInside from 'sentry/components/charts/components/dataZoomInside';
import ToolBox from 'sentry/components/charts/components/toolBox';
-import type {EChartChartReadyHandler, EChartDataZoomHandler} from 'sentry/types/echarts';
+import type {
+ EChartChartReadyHandler,
+ EChartDataZoomHandler,
+ ECharts,
+} from 'sentry/types/echarts';
import {browserHistory} from 'sentry/utils/browserHistory';
type RenderProps = {
@@ -69,7 +73,7 @@ class BarChartZoom extends Component {
/**
* Enable zoom immediately instead of having to toggle to zoom
*/
- handleChartReady = chart => {
+ handleChartReady = (chart: ECharts) => {
this.props.onChartReady?.(chart);
};
@@ -108,8 +112,8 @@ class BarChartZoom extends Component {
if (startValue !== null && endValue !== null) {
const {buckets, location, paramStart, paramEnd, minZoomWidth, onHistoryPush} =
this.props;
- const {start} = buckets[startValue];
- const {end} = buckets[endValue];
+ const {start} = buckets[startValue]!;
+ const {end} = buckets[endValue]!;
if (minZoomWidth === undefined || end - start > minZoomWidth) {
const target = {
diff --git a/static/app/components/charts/baseChart.spec.tsx b/static/app/components/charts/baseChart.spec.tsx
index c4010f998e590c..930bcbe0ba1df5 100644
--- a/static/app/components/charts/baseChart.spec.tsx
+++ b/static/app/components/charts/baseChart.spec.tsx
@@ -21,9 +21,9 @@ describe('BaseChart', function () {
);
// @ts-expect-error
const series = ReactEchartsCore.mock.calls[0][0].option.series;
- expect(series.length).toEqual(1);
+ expect(series).toHaveLength(1);
expect(series[0].lineStyle.color).toEqual(theme.gray200);
- expect(series[0].lineStyle.type).toEqual('dotted');
+ expect(series[0].lineStyle.type).toBe('dotted');
});
it('renders with lightened colored dotted previous period when using multiple series', function () {
@@ -47,12 +47,12 @@ describe('BaseChart', function () {
// @ts-expect-error
ReactEchartsCore.mock.calls[ReactEchartsCore.mock.calls.length - 1][0].option
.series;
- expect(series.length).toEqual(3);
- expect(series[0].lineStyle.color).toEqual('rgb(98, 100, 146)');
- expect(series[0].lineStyle.type).toEqual('dotted');
- expect(series[1].lineStyle.color).toEqual('rgb(244, 116, 157)');
- expect(series[1].lineStyle.type).toEqual('dotted');
- expect(series[2].lineStyle.color).toEqual('rgb(255, 213, 48)');
- expect(series[2].lineStyle.type).toEqual('dotted');
+ expect(series).toHaveLength(3);
+ expect(series[0].lineStyle.color).toBe('rgb(98, 100, 146)');
+ expect(series[0].lineStyle.type).toBe('dotted');
+ expect(series[1].lineStyle.color).toBe('rgb(244, 116, 157)');
+ expect(series[1].lineStyle.type).toBe('dotted');
+ expect(series[2].lineStyle.color).toBe('rgb(255, 213, 48)');
+ expect(series[2].lineStyle.type).toBe('dotted');
});
});
diff --git a/static/app/components/charts/baseChart.tsx b/static/app/components/charts/baseChart.tsx
index a98c80419dcafd..ebd1281bb3988e 100644
--- a/static/app/components/charts/baseChart.tsx
+++ b/static/app/components/charts/baseChart.tsx
@@ -323,8 +323,8 @@ export interface BaseChartProps {
const DEFAULT_CHART_READY = () => {};
const DEFAULT_OPTIONS = {};
-const DEFAULT_SERIES = [];
-const DEFAULT_ADDITIONAL_SERIES = [];
+const DEFAULT_SERIES: SeriesOption[] = [];
+const DEFAULT_ADDITIONAL_SERIES: LineSeriesOption[] = [];
const DEFAULT_Y_AXIS = {};
const DEFAULT_X_AXIS = {};
@@ -398,7 +398,7 @@ function BaseChartUnwrapped({
const resolvedSeries = useMemo(() => {
const previousPeriodColors =
- (previousPeriod?.length ?? 0) > 1 ? lightenHexToRgb(color) : undefined;
+ (previousPeriod?.length ?? 0) > 1 ? lightenHexToRgb(color as string[]) : undefined;
const hasSinglePoints = (series as LineSeriesOption[] | undefined)?.every(
s => Array.isArray(s.data) && s.data.length <= 1
@@ -606,22 +606,22 @@ function BaseChartUnwrapped({
const eventsMap = useMemo(
() =>
({
- click: (props, instance) => {
+ click: (props, instance: ECharts) => {
handleClick(props, instance);
onClick?.(props, instance);
},
- highlight: (props, instance) => onHighlight?.(props, instance),
- mouseout: (props, instance) => onMouseOut?.(props, instance),
- mouseover: (props, instance) => onMouseOver?.(props, instance),
- datazoom: (props, instance) => onDataZoom?.(props, instance),
- restore: (props, instance) => onRestore?.(props, instance),
- finished: (props, instance) => onFinished?.(props, instance),
- rendered: (props, instance) => onRendered?.(props, instance),
- legendselectchanged: (props, instance) =>
+ highlight: (props, instance: ECharts) => onHighlight?.(props, instance),
+ mouseout: (props, instance: ECharts) => onMouseOut?.(props, instance),
+ mouseover: (props, instance: ECharts) => onMouseOver?.(props, instance),
+ datazoom: (props, instance: ECharts) => onDataZoom?.(props, instance),
+ restore: (props, instance: ECharts) => onRestore?.(props, instance),
+ finished: (props, instance: ECharts) => onFinished?.(props, instance),
+ rendered: (props, instance: ECharts) => onRendered?.(props, instance),
+ legendselectchanged: (props, instance: ECharts) =>
onLegendSelectChanged?.(props, instance),
- brush: (props, instance) => onBrushStart?.(props, instance),
- brushend: (props, instance) => onBrushEnd?.(props, instance),
- brushselected: (props, instance) => onBrushSelected?.(props, instance),
+ brush: (props, instance: ECharts) => onBrushStart?.(props, instance),
+ brushend: (props, instance: ECharts) => onBrushEnd?.(props, instance),
+ brushselected: (props, instance: ECharts) => onBrushSelected?.(props, instance),
}) as ReactEchartProps['onEvents'],
[
onClick,
diff --git a/static/app/components/charts/baseChartHeightResize.spec.tsx b/static/app/components/charts/baseChartHeightResize.spec.tsx
index babd9bc5ed995b..84f4aec2e1d8ce 100644
--- a/static/app/components/charts/baseChartHeightResize.spec.tsx
+++ b/static/app/components/charts/baseChartHeightResize.spec.tsx
@@ -17,7 +17,7 @@ jest.mock('echarts-for-react/lib/core', () => {
};
});
-function TestContainer({children}) {
+function TestContainer({children}: {children: React.ReactNode}) {
return (
{children}
);
diff --git a/static/app/components/charts/chartZoom.tsx b/static/app/components/charts/chartZoom.tsx
index 41a3442289d397..262d856debe041 100644
--- a/static/app/components/charts/chartZoom.tsx
+++ b/static/app/components/charts/chartZoom.tsx
@@ -2,16 +2,14 @@ import {Component} from 'react';
import type {
DataZoomComponentOption,
ECharts,
- InsideDataZoomComponentOption,
ToolboxComponentOption,
XAXisComponentOption,
} from 'echarts';
-import moment from 'moment-timezone';
+import moment, {type MomentInput} from 'moment-timezone';
import * as qs from 'query-string';
import {updateDateTime} from 'sentry/actionCreators/pageFilters';
import DataZoomInside from 'sentry/components/charts/components/dataZoomInside';
-import DataZoomSlider from 'sentry/components/charts/components/dataZoomSlider';
import ToolBox from 'sentry/components/charts/components/toolBox';
import type {DateString} from 'sentry/types/core';
import type {
@@ -22,9 +20,10 @@ import type {
} from 'sentry/types/echarts';
import type {InjectedRouter} from 'sentry/types/legacyReactRouter';
import {getUtcDateString, getUtcToLocalDateObject} from 'sentry/utils/dates';
+// eslint-disable-next-line no-restricted-imports
import withSentryRouter from 'sentry/utils/withSentryRouter';
-const getDate = date =>
+const getDate = (date: MomentInput) =>
date ? moment.utc(date).format(moment.HTML5_FMT.DATETIME_LOCAL_SECONDS) : null;
type Period = {
@@ -53,7 +52,6 @@ export interface ZoomRenderProps extends Pick {
type Props = {
children: (props: ZoomRenderProps) => React.ReactNode;
- chartZoomOptions?: DataZoomComponentOption;
disabled?: boolean;
end?: DateString;
onChartReady?: EChartChartReadyHandler;
@@ -64,7 +62,6 @@ type Props = {
period?: string | null;
router?: InjectedRouter;
saveOnZoom?: boolean;
- showSlider?: boolean;
start?: DateString;
usePageDate?: boolean;
utc?: boolean | null;
@@ -242,7 +239,7 @@ class ChartZoom extends Component {
return;
}
- this.setPeriod(this.history[0]);
+ this.setPeriod(this.history[0]!);
// reset history
this.history = [];
@@ -335,8 +332,6 @@ class ChartZoom extends Component {
onChartReady: _onChartReady,
onDataZoom: _onDataZoom,
onFinished: _onFinished,
- showSlider,
- chartZoomOptions,
...props
} = this.props;
@@ -359,18 +354,9 @@ class ChartZoom extends Component {
utc,
start,
end,
- dataZoom: showSlider
- ? [
- ...DataZoomSlider({xAxisIndex, ...chartZoomOptions}),
- ...DataZoomInside({
- xAxisIndex,
- ...(chartZoomOptions as InsideDataZoomComponentOption),
- }),
- ]
- : DataZoomInside({
- xAxisIndex,
- ...(chartZoomOptions as InsideDataZoomComponentOption),
- }),
+ dataZoom: DataZoomInside({
+ xAxisIndex,
+ }),
showTimeInTooltip: true,
toolBox: ToolBox(
{},
diff --git a/static/app/components/charts/components/dataZoomSlider.tsx b/static/app/components/charts/components/dataZoomSlider.tsx
deleted file mode 100644
index 2c48aa0e50c495..00000000000000
--- a/static/app/components/charts/components/dataZoomSlider.tsx
+++ /dev/null
@@ -1,26 +0,0 @@
-import 'echarts/lib/component/dataZoomSlider';
-
-import type {SliderDataZoomComponentOption} from 'echarts';
-
-const DEFAULT: SliderDataZoomComponentOption = {
- realtime: false,
- showDetail: false,
- left: 0,
- right: 6,
- bottom: 8,
-};
-
-export default function DataZoomSlider(
- props: SliderDataZoomComponentOption | SliderDataZoomComponentOption[]
-): SliderDataZoomComponentOption[] {
- // `props` can be boolean, if so return default
- if (!props || !Array.isArray(props)) {
- const dataZoom = {
- ...DEFAULT,
- ...props,
- };
- return [dataZoom];
- }
-
- return props;
-}
diff --git a/static/app/components/charts/components/xAxis.spec.tsx b/static/app/components/charts/components/xAxis.spec.tsx
index 750772771428e9..5ac47460309657 100644
--- a/static/app/components/charts/components/xAxis.spec.tsx
+++ b/static/app/components/charts/components/xAxis.spec.tsx
@@ -9,8 +9,8 @@ jest.mock('moment-timezone', () => {
});
describe('Chart XAxis', function () {
- let axisLabelFormatter;
- let xAxisObj;
+ let axisLabelFormatter: (value: string | number, index: number) => string;
+ let xAxisObj!: ReturnType;
const props: XAxisProps = {
isGroupedByDate: true,
theme: lightTheme,
@@ -27,15 +27,16 @@ describe('Chart XAxis', function () {
utc: false,
});
- axisLabelFormatter = xAxisObj.axisLabel.formatter;
+ // @ts-expect-error formatter type is missing
+ axisLabelFormatter = xAxisObj.axisLabel!.formatter;
});
it('formats axis label for first data point', function () {
- expect(axisLabelFormatter(timestamp, 0)).toEqual('Jul 8 5:00 PM');
+ expect(axisLabelFormatter(timestamp, 0)).toBe('Jul 8 5:00 PM');
});
it('formats axis label for second data point', function () {
- expect(axisLabelFormatter(timestamp, 1)).toEqual('Jul 8 5:00 PM');
+ expect(axisLabelFormatter(timestamp, 1)).toBe('Jul 8 5:00 PM');
});
});
@@ -47,15 +48,16 @@ describe('Chart XAxis', function () {
utc: true,
});
- axisLabelFormatter = xAxisObj.axisLabel.formatter;
+ // @ts-expect-error formatter type is missing
+ axisLabelFormatter = xAxisObj.axisLabel!.formatter;
});
it('formats axis label for first data point', function () {
- expect(axisLabelFormatter(timestamp, 0)).toEqual('Jul 9 12:00 AM');
+ expect(axisLabelFormatter(timestamp, 0)).toBe('Jul 9 12:00 AM');
});
it('formats axis label for second data point', function () {
- expect(axisLabelFormatter(timestamp, 1)).toEqual('Jul 9 12:00 AM');
+ expect(axisLabelFormatter(timestamp, 1)).toBe('Jul 9 12:00 AM');
});
});
@@ -67,15 +69,16 @@ describe('Chart XAxis', function () {
period: '7d',
});
- axisLabelFormatter = xAxisObj.axisLabel.formatter;
+ // @ts-expect-error formatter type is missing
+ axisLabelFormatter = xAxisObj.axisLabel!.formatter;
});
it('formats axis label for first data point', function () {
- expect(axisLabelFormatter(timestamp, 0)).toEqual('Jul 8\n5:00 PM');
+ expect(axisLabelFormatter(timestamp, 0)).toBe('Jul 8\n5:00 PM');
});
it('formats axis label for second data point', function () {
- expect(axisLabelFormatter(timestamp, 1)).toEqual('Jul 8\n5:00 PM');
+ expect(axisLabelFormatter(timestamp, 1)).toBe('Jul 8\n5:00 PM');
});
});
});
@@ -89,15 +92,16 @@ describe('Chart XAxis', function () {
utc: false,
});
- axisLabelFormatter = xAxisObj.axisLabel.formatter;
+ // @ts-expect-error formatter type is missing
+ axisLabelFormatter = xAxisObj.axisLabel!.formatter;
});
it('formats axis label for first data point', function () {
- expect(axisLabelFormatter(timestamp, 0)).toEqual('Jul 8 5:00 PM');
+ expect(axisLabelFormatter(timestamp, 0)).toBe('Jul 8 5:00 PM');
});
it('formats axis label for second data point', function () {
- expect(axisLabelFormatter(timestamp, 1)).toEqual('5:00 PM');
+ expect(axisLabelFormatter(timestamp, 1)).toBe('5:00 PM');
});
});
@@ -109,15 +113,16 @@ describe('Chart XAxis', function () {
utc: true,
});
- axisLabelFormatter = xAxisObj.axisLabel.formatter;
+ // @ts-expect-error formatter type is missing
+ axisLabelFormatter = xAxisObj.axisLabel!.formatter;
});
it('formats axis label for first data point', function () {
- expect(axisLabelFormatter(timestamp, 0)).toEqual('Jul 9 12:00 AM');
+ expect(axisLabelFormatter(timestamp, 0)).toBe('Jul 9 12:00 AM');
});
it('formats axis label for second data point', function () {
- expect(axisLabelFormatter(timestamp, 1)).toEqual('12:00 AM');
+ expect(axisLabelFormatter(timestamp, 1)).toBe('12:00 AM');
});
});
@@ -130,15 +135,16 @@ describe('Chart XAxis', function () {
utc: true,
});
- axisLabelFormatter = xAxisObj.axisLabel.formatter;
+ // @ts-expect-error formatter type is missing
+ axisLabelFormatter = xAxisObj.axisLabel!.formatter;
});
it('formats axis label for first data point', function () {
- expect(axisLabelFormatter(timestamp, 0)).toEqual('Jul 9\n12:00 AM');
+ expect(axisLabelFormatter(timestamp, 0)).toBe('Jul 9\n12:00 AM');
});
it('formats axis label for second data point', function () {
- expect(axisLabelFormatter(timestamp, 1)).toEqual('12:00 AM');
+ expect(axisLabelFormatter(timestamp, 1)).toBe('12:00 AM');
});
});
});
diff --git a/static/app/components/charts/components/xAxis.tsx b/static/app/components/charts/components/xAxis.tsx
index e9d95a072b6a1a..528e85a9d54f7e 100644
--- a/static/app/components/charts/components/xAxis.tsx
+++ b/static/app/components/charts/components/xAxis.tsx
@@ -33,7 +33,7 @@ function XAxis({
addSecondsToTimeFormat = false,
...props
}: XAxisProps): XAXisComponentOption {
- const AxisLabelFormatter = (value: string, index: number) => {
+ const AxisLabelFormatter = (value: string | number, index: number) => {
const firstItem = index === 0;
// Always show the date of the first item. Otherwise check the interval duration
const showDate = firstItem ? true : !computeShortInterval({start, end, period});
@@ -51,7 +51,7 @@ function XAxis({
}
if (props.truncate) {
- return truncationFormatter(value, props.truncate);
+ return truncationFormatter(value as string, props.truncate);
}
return undefined;
diff --git a/static/app/components/charts/eventsAreaChart.spec.tsx b/static/app/components/charts/eventsAreaChart.spec.tsx
index 38d4b6069775e8..d0113ed85c4141 100644
--- a/static/app/components/charts/eventsAreaChart.spec.tsx
+++ b/static/app/components/charts/eventsAreaChart.spec.tsx
@@ -52,6 +52,6 @@ describe('EventsChart with legend', function () {
/>
);
expect(await screen.findByTestId('area-chart')).toBeInTheDocument();
- expect(jest.mocked(BaseChart).mock.calls[0][0].legend).toHaveProperty('data');
+ expect(jest.mocked(BaseChart).mock.calls[0]![0].legend).toHaveProperty('data');
});
});
diff --git a/static/app/components/charts/eventsChart.tsx b/static/app/components/charts/eventsChart.tsx
index 07fd7b641dfb9c..2fd37ac0b1b9f2 100644
--- a/static/app/components/charts/eventsChart.tsx
+++ b/static/app/components/charts/eventsChart.tsx
@@ -314,7 +314,7 @@ class Chart extends Component {
// Check to see if all series output types are the same. If not, then default to number.
const outputType =
new Set(Object.values(timeseriesResultsTypes)).size === 1
- ? timeseriesResultsTypes[yAxis]
+ ? timeseriesResultsTypes[yAxis]!
: 'number';
return axisLabelFormatterUsingAggregateOutputType(value, outputType);
}
@@ -607,7 +607,7 @@ class EventsChart extends Component {
additionalSeries={additionalSeries}
previousSeriesTransformer={previousSeriesTransformer}
stacked={this.isStacked()}
- yAxis={yAxisArray[0]}
+ yAxis={yAxisArray[0]!}
showDaily={showDaily}
colors={colors}
legendOptions={legendOptions}
diff --git a/static/app/components/charts/eventsRequest.tsx b/static/app/components/charts/eventsRequest.tsx
index 2462b5251c5793..f5f119ed5340ab 100644
--- a/static/app/components/charts/eventsRequest.tsx
+++ b/static/app/components/charts/eventsRequest.tsx
@@ -425,7 +425,7 @@ class EventsRequest extends PureComponent current[i][0] * 1000
+ (_timestamp, _countArray, i) => current[i]![0] * 1000
),
stack: 'previous',
};
@@ -573,7 +573,7 @@ class EventsRequest extends PureComponent {
- const seriesData: EventsStats = timeseriesData[seriesName];
+ const seriesData: EventsStats = timeseriesData[seriesName]!;
const processedData = this.processData(
seriesData,
index,
@@ -589,7 +589,7 @@ class EventsRequest extends PureComponent a[0] - b[0]);
const timeseriesResultsTypes: Record = {};
Object.keys(timeseriesData).forEach(key => {
- const fieldsMeta = timeseriesData[key].meta?.fields[getAggregateAlias(key)];
+ const fieldsMeta = timeseriesData[key]!.meta?.fields[getAggregateAlias(key)];
if (fieldsMeta) {
timeseriesResultsTypes[key] = fieldsMeta;
}
diff --git a/static/app/components/charts/intervalSelector.spec.tsx b/static/app/components/charts/intervalSelector.spec.tsx
index 33924644e0a9bf..8e1a0674f3338c 100644
--- a/static/app/components/charts/intervalSelector.spec.tsx
+++ b/static/app/components/charts/intervalSelector.spec.tsx
@@ -29,7 +29,7 @@ describe('IntervalSelector', function () {
/>
);
render(intervalSelector);
- expect(interval).toEqual('4h');
+ expect(interval).toBe('4h');
});
it('resets large interval', function () {
eventView.interval = '1h';
@@ -42,7 +42,7 @@ describe('IntervalSelector', function () {
/>
);
render(intervalSelector);
- expect(eventView.interval).toEqual('1m');
+ expect(eventView.interval).toBe('1m');
});
it('leaves default interval alone', function () {
eventView.interval = undefined;
@@ -56,6 +56,6 @@ describe('IntervalSelector', function () {
/>
);
render(intervalSelector);
- expect(interval).toEqual('not called');
+ expect(interval).toBe('not called');
});
});
diff --git a/static/app/components/charts/intervalSelector.tsx b/static/app/components/charts/intervalSelector.tsx
index 7a39386164b68e..34c35c8e2a0b71 100644
--- a/static/app/components/charts/intervalSelector.tsx
+++ b/static/app/components/charts/intervalSelector.tsx
@@ -123,12 +123,12 @@ function formatHoursToInterval(hours: number): [number, IntervalUnits] {
function getIntervalOption(rangeHours: number): IntervalOption {
for (const index in INTERVAL_OPTIONS) {
- const currentOption = INTERVAL_OPTIONS[index];
+ const currentOption = INTERVAL_OPTIONS[index]!;
if (currentOption.rangeStart <= rangeHours) {
return currentOption;
}
}
- return INTERVAL_OPTIONS[0];
+ return INTERVAL_OPTIONS[0]!;
}
function bindInterval(
@@ -196,7 +196,7 @@ export default function IntervalSelector({
makeItem(
amount,
unit,
- SUPPORTED_RELATIVE_PERIOD_UNITS[unit].label,
+ SUPPORTED_RELATIVE_PERIOD_UNITS[unit]!.label,
results.length + 1
)
);
diff --git a/static/app/components/charts/miniBarChart.tsx b/static/app/components/charts/miniBarChart.tsx
index 86549632b8a681..5125390e7724da 100644
--- a/static/app/components/charts/miniBarChart.tsx
+++ b/static/app/components/charts/miniBarChart.tsx
@@ -256,7 +256,7 @@ function MiniBarChart({
: [theme.gray200, theme.purple300, theme.purple300];
for (let i = 0; i < series.length; i++) {
- const original = series[i];
+ const original = series[i]!;
const updated: BarChartSeries = {
...original,
cursor: 'normal',
diff --git a/static/app/components/charts/optionSelector.spec.tsx b/static/app/components/charts/optionSelector.spec.tsx
index c71fd82d17fba2..7b85e09c90ba4f 100644
--- a/static/app/components/charts/optionSelector.spec.tsx
+++ b/static/app/components/charts/optionSelector.spec.tsx
@@ -5,7 +5,6 @@ import {initializeOrg} from 'sentry-test/initializeOrg';
import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
import OptionSelector from 'sentry/components/charts/optionSelector';
-import {t} from 'sentry/locale';
describe('Charts > OptionSelector (Multiple)', function () {
const features = ['discover-basic'];
@@ -32,7 +31,7 @@ describe('Charts > OptionSelector (Multiple)', function () {
{
diff --git a/static/app/components/charts/percentageAreaChart.tsx b/static/app/components/charts/percentageAreaChart.tsx
index ac9916724a8eab..3697f14be07cfd 100644
--- a/static/app/components/charts/percentageAreaChart.tsx
+++ b/static/app/components/charts/percentageAreaChart.tsx
@@ -43,9 +43,9 @@ export default class PercentageAreaChart extends Component {
const {series, getDataItemName, getValue} = this.props;
const totalsArray: [string | number, number][] = series.length
- ? series[0].data.map(({name}, i) => [
+ ? series[0]!.data.map(({name}, i) => [
name,
- series.reduce((sum, {data}) => sum + data[i].value, 0),
+ series.reduce((sum, {data}) => sum + data[i]!.value, 0),
])
: [];
const totals = new Map(totalsArray);
diff --git a/static/app/components/charts/pieChart.tsx b/static/app/components/charts/pieChart.tsx
index 94dd202b984b56..0d80739112e579 100644
--- a/static/app/components/charts/pieChart.tsx
+++ b/static/app/components/charts/pieChart.tsx
@@ -77,7 +77,7 @@ class PieChart extends Component {
.reduce(
(acc, [name, value]) => ({
...acc,
- [name]: value,
+ [name!]: value,
}),
{}
);
@@ -95,7 +95,7 @@ class PieChart extends Component {
// Note, we only take the first series unit!
const [firstSeries] = series;
- const seriesPercentages = this.getSeriesPercentages(firstSeries);
+ const seriesPercentages = this.getSeriesPercentages(firstSeries!);
return (
{
if (
!this.isInitialSelected ||
!name ||
- firstSeries.data[this.selected].name === name
+ firstSeries!.data[this.selected]!.name === name
) {
return;
}
@@ -159,8 +159,8 @@ class PieChart extends Component {
}}
series={[
PieSeries({
- name: firstSeries.seriesName,
- data: firstSeries.data,
+ name: firstSeries!.seriesName,
+ data: firstSeries!.data,
avoidLabelOverlap: false,
label: {
formatter: ({name, percent}) => `${name}\n${percent}%`,
diff --git a/static/app/components/charts/releaseSeries.spec.tsx b/static/app/components/charts/releaseSeries.spec.tsx
index 22151a9547547c..5e8cb52cc02143 100644
--- a/static/app/components/charts/releaseSeries.spec.tsx
+++ b/static/app/components/charts/releaseSeries.spec.tsx
@@ -237,8 +237,8 @@ describe('ReleaseSeries', function () {
);
- await waitFor(() => expect(screen.getByText('Series 1')).toBeInTheDocument());
- await waitFor(() => expect(screen.getByText('Series 2')).toBeInTheDocument());
+ await screen.findByText('Series 1');
+ await screen.findByText('Series 2');
await waitFor(() => expect(releasesMock).toHaveBeenCalledTimes(1));
});
diff --git a/static/app/components/charts/releaseSeries.tsx b/static/app/components/charts/releaseSeries.tsx
index b126c57ee68cc4..9fc3285d9acaf5 100644
--- a/static/app/components/charts/releaseSeries.tsx
+++ b/static/app/components/charts/releaseSeries.tsx
@@ -175,7 +175,7 @@ class ReleaseSeries extends Component {
if (pageLinks) {
const paginationObject = parseLinkHeader(pageLinks);
hasMore = paginationObject?.next?.results ?? false;
- conditions.cursor = paginationObject.next.cursor;
+ conditions.cursor = paginationObject.next!.cursor;
} else {
hasMore = false;
}
diff --git a/static/app/components/charts/useChartZoom.tsx b/static/app/components/charts/useChartZoom.tsx
index 99a3d5deeb5e91..104cb5996af197 100644
--- a/static/app/components/charts/useChartZoom.tsx
+++ b/static/app/components/charts/useChartZoom.tsx
@@ -1,15 +1,9 @@
import {useCallback, useEffect, useMemo, useRef} from 'react';
-import type {
- DataZoomComponentOption,
- ECharts,
- InsideDataZoomComponentOption,
- ToolboxComponentOption,
-} from 'echarts';
+import type {DataZoomComponentOption, ECharts, ToolboxComponentOption} from 'echarts';
import * as qs from 'query-string';
import {updateDateTime} from 'sentry/actionCreators/pageFilters';
import DataZoomInside from 'sentry/components/charts/components/dataZoomInside';
-import DataZoomSlider from 'sentry/components/charts/components/dataZoomSlider';
import ToolBox from 'sentry/components/charts/components/toolBox';
import type {DateString} from 'sentry/types/core';
import type {
@@ -43,7 +37,6 @@ interface ZoomRenderProps {
interface Props {
children: (props: ZoomRenderProps) => React.ReactNode;
- chartZoomOptions?: DataZoomComponentOption;
/**
* Disables saving changes to the current period
*/
@@ -54,7 +47,6 @@ interface Props {
* Will persist zoom state to page filters
*/
saveOnZoom?: boolean;
- showSlider?: boolean;
/**
* Use either `saveOnZoom` or `usePageDate` not both
* Persists zoom state to query params without updating page filters.
@@ -135,8 +127,6 @@ export function useChartZoom({
usePageDate,
saveOnZoom,
xAxisIndex,
- showSlider,
- chartZoomOptions,
}: Omit): ZoomRenderProps {
const {handleChartReady} = useChartZoomCancel();
const location = useLocation();
@@ -256,12 +246,9 @@ export function useChartZoom({
const dataZoomProp = useMemo(() => {
const zoomInside = DataZoomInside({
xAxisIndex,
- ...(chartZoomOptions as InsideDataZoomComponentOption),
});
- return showSlider
- ? [...DataZoomSlider({xAxisIndex, ...chartZoomOptions}), ...zoomInside]
- : zoomInside;
- }, [chartZoomOptions, showSlider, xAxisIndex]);
+ return zoomInside;
+ }, [xAxisIndex]);
const toolBox = useMemo(
() =>
diff --git a/static/app/components/charts/utils.spec.tsx b/static/app/components/charts/utils.spec.tsx
index 9ed4973e9656bc..80e775c90b4e6e 100644
--- a/static/app/components/charts/utils.spec.tsx
+++ b/static/app/components/charts/utils.spec.tsx
@@ -111,19 +111,19 @@ describe('Chart Utils', function () {
]);
it('handles negative intervals', function () {
- expect(ladder.getInterval(-1)).toEqual('15m');
+ expect(ladder.getInterval(-1)).toBe('15m');
});
it('finds granularity at lower bound', function () {
- expect(ladder.getInterval(getDiffInMinutes({period: '2m'}))).toEqual('15m');
+ expect(ladder.getInterval(getDiffInMinutes({period: '2m'}))).toBe('15m');
});
it('finds granularity between bounds', function () {
- expect(ladder.getInterval(getDiffInMinutes({period: '3d'}))).toEqual('30m');
+ expect(ladder.getInterval(getDiffInMinutes({period: '3d'}))).toBe('30m');
});
it('finds granularity at upper bound', function () {
- expect(ladder.getInterval(getDiffInMinutes({period: '60d'}))).toEqual('1d');
+ expect(ladder.getInterval(getDiffInMinutes({period: '60d'}))).toBe('1d');
});
});
diff --git a/static/app/components/charts/utils.tsx b/static/app/components/charts/utils.tsx
index ac03e159a78a1c..66c3c3b5461718 100644
--- a/static/app/components/charts/utils.tsx
+++ b/static/app/components/charts/utils.tsx
@@ -273,7 +273,7 @@ export const getDimensionValue = (dimension?: number | string | null) => {
};
const RGB_LIGHTEN_VALUE = 30;
-export const lightenHexToRgb = (colors: string[]) =>
+export const lightenHexToRgb = (colors: ReadonlyArray) =>
colors.map(hex => {
const rgb = [
Math.min(parseInt(hex.slice(1, 3), 16) + RGB_LIGHTEN_VALUE, 255),
@@ -292,7 +292,7 @@ export const processTableResults = (tableResults?: TableDataWithTitle[]) => {
return DEFAULT_GEO_DATA;
}
- const tableResult = tableResults[0];
+ const tableResult = tableResults[0]!;
const {data} = tableResult;
@@ -300,7 +300,7 @@ export const processTableResults = (tableResults?: TableDataWithTitle[]) => {
return DEFAULT_GEO_DATA;
}
- const preAggregate = Object.keys(data[0]).find(column => {
+ const preAggregate = Object.keys(data[0]!).find(column => {
return column !== 'geo.country_code';
});
@@ -309,7 +309,7 @@ export const processTableResults = (tableResults?: TableDataWithTitle[]) => {
}
return {
- title: tableResult.title ?? '',
+ title: tableResult!.title ?? '',
data: data.map(row => {
return {
name: row['geo.country_code'] as string,
diff --git a/static/app/components/chevron.tsx b/static/app/components/chevron.tsx
index d80b62050d3e22..dfa9f19cfae203 100644
--- a/static/app/components/chevron.tsx
+++ b/static/app/components/chevron.tsx
@@ -34,7 +34,7 @@ function getPath(direction: NonNullable) {
[3.5, 5.5],
[7, 9],
[10.5, 5.5],
- ];
+ ] as const;
switch (direction) {
case 'right':
diff --git a/static/app/components/collapsible.spec.tsx b/static/app/components/collapsible.spec.tsx
index 5abc539696c81a..80ab6db5a0864f 100644
--- a/static/app/components/collapsible.spec.tsx
+++ b/static/app/components/collapsible.spec.tsx
@@ -10,7 +10,7 @@ describe('Collapsible', function () {
render({items} );
expect(screen.getAllByText(/Item/)).toHaveLength(5);
- expect(screen.getAllByText(/Item/)[2].innerHTML).toBe('Item 3');
+ expect(screen.getAllByText(/Item/)[2]!.innerHTML).toBe('Item 3');
expect(screen.getByLabelText('Show 2 hidden items')).toBeInTheDocument();
expect(screen.queryByLabelText('Collapse')).not.toBeInTheDocument();
diff --git a/static/app/components/compactSelect/control.tsx b/static/app/components/compactSelect/control.tsx
index 141d148d7a4f77..77423cf6ee32d8 100644
--- a/static/app/components/compactSelect/control.tsx
+++ b/static/app/components/compactSelect/control.tsx
@@ -524,9 +524,9 @@ export function Control({
>
({
disallowEmptySelection: disallowEmptySelection ?? true,
allowDuplicateSelectionEvents: true,
onSelectionChange: selection => {
- const selectedOption = getSelectedOptions(items, selection)[0] ?? null;
+ const selectedOption = getSelectedOptions(items, selection)[0]! ?? null;
// Save selected options in SelectContext, to update the trigger label
saveSelectedOptions(compositeIndex, selectedOption);
onChange?.(selectedOption);
diff --git a/static/app/components/compactSelect/utils.tsx b/static/app/components/compactSelect/utils.tsx
index 768080ccf0f88a..b5f80ae9c6ec67 100644
--- a/static/app/components/compactSelect/utils.tsx
+++ b/static/app/components/compactSelect/utils.tsx
@@ -152,7 +152,7 @@ export function getHiddenOptions(
let currentIndex = 0;
while (currentIndex < remainingItems.length) {
- const item = remainingItems[currentIndex];
+ const item = remainingItems[currentIndex]!;
const delta = 'options' in item ? item.options.length : 1;
if (accumulator + delta > limit) {
@@ -164,12 +164,12 @@ export function getHiddenOptions(
currentIndex += 1;
}
- for (let i = threshold[0]; i < remainingItems.length; i++) {
- const item = remainingItems[i];
+ for (let i = threshold[0]!; i < remainingItems.length; i++) {
+ const item = remainingItems[i]!;
if ('options' in item) {
- const startingIndex = i === threshold[0] ? threshold[1] : 0;
+ const startingIndex = i === threshold[0] ? threshold[1]! : 0;
for (let j = startingIndex; j < item.options.length; j++) {
- hiddenOptionsSet.add(item.options[j].key);
+ hiddenOptionsSet.add(item.options[j]!.key);
}
} else {
hiddenOptionsSet.add(item.key);
diff --git a/static/app/components/confirm.stories.tsx b/static/app/components/confirm.stories.tsx
index a079c3788d2d1a..6d51656f92bfe2 100644
--- a/static/app/components/confirm.stories.tsx
+++ b/static/app/components/confirm.stories.tsx
@@ -1,5 +1,3 @@
-/* eslint-disable no-console */
-
import {Fragment, useState} from 'react';
import styled from '@emotion/styled';
diff --git a/static/app/components/contextPickerModal.spec.tsx b/static/app/components/contextPickerModal.spec.tsx
index 06bc498defb7b0..3fc5eddd801c7f 100644
--- a/static/app/components/contextPickerModal.spec.tsx
+++ b/static/app/components/contextPickerModal.spec.tsx
@@ -65,6 +65,7 @@ describe('ContextPickerModal', function () {
render(getComponent());
expect(screen.getByText('Select an Organization')).toBeInTheDocument();
+ expect(screen.getByRole('textbox')).toHaveFocus();
expect(screen.queryByText('Select a Project to continue')).not.toBeInTheDocument();
});
@@ -102,8 +103,8 @@ describe('ContextPickerModal', function () {
await waitFor(() => {
expect(fetchProjectsForOrg).toHaveBeenCalled();
- expect(onFinish).toHaveBeenLastCalledWith('/test/org2/path/project2/');
});
+ expect(onFinish).toHaveBeenLastCalledWith('/test/org2/path/project2/');
});
it('selects an org and calls `onFinish` with URL with organization slug', async function () {
@@ -144,6 +145,7 @@ describe('ContextPickerModal', function () {
// Should see 1 selected, and 1 as an option
expect(screen.getAllByText('org-slug')).toHaveLength(2);
+ expect(screen.getByRole('textbox')).toHaveFocus();
expect(await screen.findByText('My Projects')).toBeInTheDocument();
expect(screen.getByText(project.slug)).toBeInTheDocument();
expect(screen.getByText(project2.slug)).toBeInTheDocument();
@@ -164,7 +166,7 @@ describe('ContextPickerModal', function () {
];
const fetchProjectsForOrg = MockApiClient.addMockResponse({
url: `/organizations/${org2.slug}/projects/`,
- body: organizations[1].projects,
+ body: organizations[1]!.projects,
});
OrganizationsStore.load(organizations);
@@ -180,6 +182,7 @@ describe('ContextPickerModal', function () {
// Should not have anything selected
expect(screen.getByText('Select an Organization')).toBeInTheDocument();
+ expect(screen.getByRole('textbox')).toHaveFocus();
// Select org2
await selectEvent.select(screen.getByText('Select an Organization'), org2.slug);
diff --git a/static/app/components/contextPickerModal.tsx b/static/app/components/contextPickerModal.tsx
index 4e5cf53c3f6b53..bb52bad7ba9363 100644
--- a/static/app/components/contextPickerModal.tsx
+++ b/static/app/components/contextPickerModal.tsx
@@ -1,5 +1,4 @@
import {Component, Fragment} from 'react';
-import {findDOMNode} from 'react-dom';
import {components} from 'react-select';
import styled from '@emotion/styled';
import type {Query} from 'history';
@@ -69,6 +68,10 @@ type Props = ModalRenderProps & {
allowAllProjectsSelection?: boolean;
};
+function autoFocusReactSelect(reactSelectRef: any) {
+ reactSelectRef?.select?.focus?.();
+}
+
const selectStyles: StylesConfig = {
menu: provided => ({
...provided,
@@ -116,12 +119,6 @@ class ContextPickerModal extends Component {
onFinishTimeout: number | undefined = undefined;
- // TODO(ts) The various generics in react-select types make getting this
- // right hard.
- orgSelect: any | null = null;
- projectSelect: any | null = null;
- configSelect: any | null = null;
-
// Performs checks to see if we need to prompt user
// i.e. When there is only 1 org and no project is needed or
// there is only 1 org and only 1 project (which should be rare)
@@ -150,7 +147,7 @@ class ContextPickerModal extends Component {
// If there is only one org and we don't need a project slug, then call finish callback
if (!needProject) {
const newPathname = replaceRouterParams(pathname, {
- orgId: organizations[0].slug,
+ orgId: organizations[0]!.slug,
});
this.onFinishTimeout =
onFinish(
@@ -164,13 +161,13 @@ class ContextPickerModal extends Component {
// Use latest org or if only 1 org, use that
let org = latestOrg;
if (!org && organizations.length === 1) {
- org = organizations[0].slug;
+ org = organizations[0]!.slug;
}
const newPathname = replaceRouterParams(pathname, {
orgId: org,
- projectId: projects[0].slug,
- project: this.props.projects.find(p => p.slug === projects[0].slug)?.id,
+ projectId: projects[0]!.slug,
+ project: this.props.projects.find(p => p.slug === projects[0]!.slug)?.id,
});
this.onFinishTimeout =
onFinish(
@@ -178,21 +175,6 @@ class ContextPickerModal extends Component {
) ?? undefined;
};
- doFocus = (ref: any | null) => {
- if (!ref || this.props.loading) {
- return;
- }
-
- // eslint-disable-next-line react/no-find-dom-node
- const el = findDOMNode(ref) as HTMLElement;
-
- if (el !== null) {
- const input = el.querySelector('input');
-
- input?.focus();
- }
- };
-
handleSelectOrganization = ({value}: {value: string}) => {
// If we do not need to select a project, we can early return after selecting an org
// No need to fetch org details
@@ -286,7 +268,7 @@ class ContextPickerModal extends Component {
const projectOptions = [
{
label: t('My Projects'),
- options: memberProjects.map(p => ({
+ options: memberProjects!.map(p => ({
value: p.slug,
label: p.slug,
disabled: false,
@@ -294,7 +276,7 @@ class ContextPickerModal extends Component {
},
{
label: t('All Projects'),
- options: nonMemberProjects.map(p => ({
+ options: nonMemberProjects!.map(p => ({
value: p.slug,
label: p.slug,
disabled: allowAllProjectsSelection ? false : !isSuperuser,
@@ -316,10 +298,7 @@ class ContextPickerModal extends Component {
return (
{
- this.projectSelect = ref;
- this.doFocus(this.projectSelect);
- }}
+ ref={autoFocusReactSelect}
placeholder={t('Select a Project to continue')}
name="project"
options={projectOptions}
@@ -338,7 +317,7 @@ class ContextPickerModal extends Component {
const options = [
{
label: tct('[providerName] Configurations', {
- providerName: integrationConfigs[0].provider.name,
+ providerName: integrationConfigs[0]!.provider.name,
}),
options: integrationConfigs.map(config => ({
value: config.id,
@@ -354,10 +333,7 @@ class ContextPickerModal extends Component {
];
return (
{
- this.configSelect = ref;
- this.doFocus(this.configSelect);
- }}
+ ref={autoFocusReactSelect}
placeholder={t('Select a configuration to continue')}
name="configurations"
options={options}
@@ -398,18 +374,14 @@ class ContextPickerModal extends Component {
return (
-
+
{loading && }
{needOrg && (
{
- this.orgSelect = ref;
- if (shouldShowProjectSelector) {
- return;
- }
- this.doFocus(this.orgSelect);
- }}
+ ref={shouldShowProjectSelector ? undefined : autoFocusReactSelect}
placeholder={t('Select an Organization')}
name="organization"
options={orgChoices}
diff --git a/static/app/components/customIgnoreCountModal.tsx b/static/app/components/customIgnoreCountModal.tsx
index 5950284083794f..3369481c666645 100644
--- a/static/app/components/customIgnoreCountModal.tsx
+++ b/static/app/components/customIgnoreCountModal.tsx
@@ -1,4 +1,4 @@
-import {Component, Fragment} from 'react';
+import {Fragment, useState} from 'react';
import type {ModalRenderProps} from 'sentry/actionCreators/modal';
import {Button} from 'sentry/components/button';
@@ -21,80 +21,70 @@ type Props = ModalRenderProps & {
windowOptions: SelectValue[];
};
-type State = {
- count: number;
- window: number | null;
-};
-
-class CustomIgnoreCountModal extends Component {
- state: State = {
- count: 100,
- window: null,
- };
-
- handleSubmit = () => {
- const {count, window} = this.state;
- const {countName, windowName} = this.props;
+export default function CustomIgnoreCountModal(props: Props) {
+ const [count, setCount] = useState(100);
+ const [window, setWindow] = useState(null);
+ const {
+ Header,
+ Footer,
+ Body,
+ countLabel,
+ label,
+ windowOptions,
+ countName,
+ windowName,
+ onSelected,
+ closeModal,
+ } = props;
+ const handleSubmit = () => {
const statusDetails: IgnoredStatusDetails = {[countName]: count};
if (window) {
statusDetails[windowName] = window;
}
- this.props.onSelected(statusDetails);
- this.props.closeModal();
+ onSelected(statusDetails);
+ closeModal();
};
- handleChange = (name: keyof State, value: number) => {
- this.setState({[name]: value} as State);
- };
-
- render() {
- const {Header, Footer, Body, countLabel, label, closeModal, windowOptions} =
- this.props;
- const {count, window} = this.state;
-
- return (
-
-
-
- this.handleChange('count' as const, Number(val))}
- required
- placeholder={t('e.g. 100')}
- />
- this.handleChange('window' as const, val)}
- options={windowOptions}
- placeholder={t('e.g. per hour')}
- allowClear
- help={t('(Optional) If supplied, this rule will apply as a rate of change.')}
- />
-
-
-
- {t('Cancel')}
-
- {t('Ignore')}
-
-
-
-
- );
- }
+ return (
+
+
+
+
+
+
+
+
+ {t('Cancel')}
+
+ {t('Ignore')}
+
+
+
+
+ );
}
-
-export default CustomIgnoreCountModal;
diff --git a/static/app/components/customIgnoreDurationModal.tsx b/static/app/components/customIgnoreDurationModal.tsx
index e7ba5952034c0a..6fafa5feb1b559 100644
--- a/static/app/components/customIgnoreDurationModal.tsx
+++ b/static/app/components/customIgnoreDurationModal.tsx
@@ -1,4 +1,4 @@
-import {Component, createRef, Fragment} from 'react';
+import {Fragment, useRef, useState} from 'react';
import moment from 'moment-timezone';
import {sprintf} from 'sprintf-js';
@@ -9,32 +9,22 @@ import ButtonBar from 'sentry/components/buttonBar';
import {t} from 'sentry/locale';
import type {IgnoredStatusDetails} from 'sentry/types/group';
-const defaultProps = {
- label: t('Ignore this issue until \u2026'),
-};
-
type Props = ModalRenderProps & {
onSelected: (details: IgnoredStatusDetails) => void;
-} & typeof defaultProps;
-
-type State = {
- dateWarning: boolean;
};
-export default class CustomIgnoreDurationModal extends Component {
- static defaultProps = defaultProps;
-
- state: State = {
- dateWarning: false,
- };
+export default function CustomIgnoreDurationModal(props: Props) {
+ const [dateWarning, setDateWarning] = useState(false);
+ const {Header, Body, Footer, onSelected, closeModal} = props;
+ const label = t('Ignore this issue until \u2026');
- snoozeDateInputRef = createRef();
+ const snoozeDateInputRef = useRef(null);
- snoozeTimeInputRef = createRef();
+ const snoozeTimeInputRef = useRef(null);
- selectedIgnoreMinutes = () => {
- const dateStr = this.snoozeDateInputRef.current?.value; // YYYY-MM-DD
- const timeStr = this.snoozeTimeInputRef.current?.value; // HH:MM
+ const selectedIgnoreMinutes = () => {
+ const dateStr = snoozeDateInputRef.current?.value; // YYYY-MM-DD
+ const timeStr = snoozeTimeInputRef.current?.value; // HH:MM
if (dateStr && timeStr) {
const selectedDate = moment.utc(dateStr + ' ' + timeStr);
if (selectedDate.isValid()) {
@@ -45,86 +35,80 @@ export default class CustomIgnoreDurationModal extends Component {
return 0;
};
- snoozeClicked = () => {
- const minutes = this.selectedIgnoreMinutes();
+ const snoozeClicked = () => {
+ const minutes = selectedIgnoreMinutes();
if (minutes <= 0) {
- this.setState({
- dateWarning: minutes <= 0,
- });
-
+ setDateWarning(minutes <= 0);
return;
}
- this.props.onSelected({ignoreDuration: minutes});
- this.props.closeModal();
+ onSelected({ignoreDuration: minutes});
+ closeModal();
};
- render() {
- // Give the user a sane starting point to select a date
- // (prettier than the empty date/time inputs):
- const defaultDate = new Date();
- defaultDate.setDate(defaultDate.getDate() + 14);
- defaultDate.setSeconds(0);
- defaultDate.setMilliseconds(0);
-
- const defaultDateVal = sprintf(
- '%d-%02d-%02d',
- defaultDate.getUTCFullYear(),
- defaultDate.getUTCMonth() + 1,
- defaultDate.getUTCDate()
- );
-
- const defaultTimeVal = sprintf('%02d:00', defaultDate.getUTCHours());
- const {Header, Body, Footer, label} = this.props;
-
- return (
-
-
-
-
-
- {this.state.dateWarning && (
-
- {t('Please enter a valid date in the future')}
-
- )}
-
-
-
- {t('Cancel')}
-
-
- {t('Ignore')}
-
-
-
-
- );
- }
+ // Give the user a sane starting point to select a date
+ // (prettier than the empty date/time inputs):
+ const defaultDate = new Date();
+ defaultDate.setDate(defaultDate.getDate() + 14);
+ defaultDate.setSeconds(0);
+ defaultDate.setMilliseconds(0);
+
+ const defaultDateVal = sprintf(
+ '%d-%02d-%02d',
+ defaultDate.getUTCFullYear(),
+ defaultDate.getUTCMonth() + 1,
+ defaultDate.getUTCDate()
+ );
+
+ const defaultTimeVal = sprintf('%02d:00', defaultDate.getUTCHours());
+
+ return (
+
+
+
+
+
+ {dateWarning && (
+
+ {t('Please enter a valid date in the future')}
+
+ )}
+
+
+
+ {t('Cancel')}
+
+
+ {t('Ignore')}
+
+
+
+
+ );
}
diff --git a/static/app/components/deprecatedAssigneeSelector.spec.tsx b/static/app/components/deprecatedAssigneeSelector.spec.tsx
index c11f5d77679d8b..ad359ce728da1d 100644
--- a/static/app/components/deprecatedAssigneeSelector.spec.tsx
+++ b/static/app/components/deprecatedAssigneeSelector.spec.tsx
@@ -349,7 +349,7 @@ describe('DeprecatedAssigneeSelector', () => {
const options = screen.getAllByTestId('assignee-option');
expect(options[5]).toHaveTextContent('JD');
- await userEvent.click(options[4]);
+ await userEvent.click(options[4]!);
await waitFor(() => {
expect(addMessageSpy).toHaveBeenCalledWith(
@@ -379,7 +379,7 @@ describe('DeprecatedAssigneeSelector', () => {
const options = screen.getAllByTestId('assignee-option');
// Suggested assignee initials
expect(options[0]).toHaveTextContent('JB');
- await userEvent.click(options[0]);
+ await userEvent.click(options[0]!);
await waitFor(() =>
expect(assignGroup2Mock).toHaveBeenCalledWith(
diff --git a/static/app/components/deprecatedAssigneeSelector.tsx b/static/app/components/deprecatedAssigneeSelector.tsx
index 4a9f9427693b60..029ec7ae6ce8e6 100644
--- a/static/app/components/deprecatedAssigneeSelector.tsx
+++ b/static/app/components/deprecatedAssigneeSelector.tsx
@@ -72,6 +72,7 @@ export function AssigneeAvatar({
}
if (suggestedActors.length > 0) {
+ const firstActor = suggestedActors[0]!;
return (
{tct('Suggestion: [name]', {
name:
- suggestedActors[0].type === 'team'
- ? `#${suggestedActors[0].name}`
- : suggestedActors[0].name,
+ firstActor.type === 'team' ? `#${firstActor.name}` : firstActor.name,
})}
{suggestedActors.length > 1 &&
tn(' + %s other', ' + %s others', suggestedActors.length - 1)}
- {suggestedReasons[suggestedActors[0].suggestedReason]}
+ {suggestedReasons[firstActor.suggestedReason]}
}
diff --git a/static/app/components/deprecatedAssigneeSelectorDropdown.tsx b/static/app/components/deprecatedAssigneeSelectorDropdown.tsx
index 0e2b351b38343e..66f34f86fe65d8 100644
--- a/static/app/components/deprecatedAssigneeSelectorDropdown.tsx
+++ b/static/app/components/deprecatedAssigneeSelectorDropdown.tsx
@@ -370,7 +370,7 @@ export class DeprecatedAssigneeSelectorDropdown extends Component<
)[] = [];
for (let i = 0; i < suggestedAssignees.length; i++) {
- const assignee = suggestedAssignees[i];
+ const assignee = suggestedAssignees[i]!;
if (assignee.type !== 'user' && assignee.type !== 'team') {
continue;
}
@@ -514,7 +514,7 @@ export class DeprecatedAssigneeSelectorDropdown extends Component<
const member = memberList.find(user => user.id === id);
if (member) {
return {
- id,
+ id: id!,
type: 'user',
name: member.name,
suggestedReason: owner.type,
@@ -528,7 +528,7 @@ export class DeprecatedAssigneeSelectorDropdown extends Component<
);
if (matchingTeam) {
return {
- id,
+ id: id!,
type: 'team',
name: matchingTeam.team.name,
suggestedReason: owner.type,
@@ -606,7 +606,7 @@ export function putSessionUserFirst(members: User[] | undefined): User[] {
return members;
}
- const arrangedMembers = [members[sessionUserIndex]].concat(
+ const arrangedMembers = [members[sessionUserIndex]!].concat(
members.slice(0, sessionUserIndex),
members.slice(sessionUserIndex + 1)
);
diff --git a/static/app/components/deprecatedDropdownMenu.spec.tsx b/static/app/components/deprecatedDropdownMenu.spec.tsx
index e2eee008a9b667..75d19e0c10fd26 100644
--- a/static/app/components/deprecatedDropdownMenu.spec.tsx
+++ b/static/app/components/deprecatedDropdownMenu.spec.tsx
@@ -113,7 +113,7 @@ describe('dropdownMenuDeprecated', function () {
await userEvent.click(screen.getByTestId('menu'));
expect(menuClick).toHaveBeenCalled();
- expect(screen.queryByRole('listbox')).toBeInTheDocument();
+ expect(screen.getByRole('listbox')).toBeInTheDocument();
});
it('always rendered menus should attach document event listeners only when opened', async function () {
diff --git a/static/app/components/smartSearchBar/actionButton.tsx b/static/app/components/deprecatedSmartSearchBar/actionButton.tsx
similarity index 90%
rename from static/app/components/smartSearchBar/actionButton.tsx
rename to static/app/components/deprecatedSmartSearchBar/actionButton.tsx
index 8acd2c4f9f768d..41778b8d831272 100644
--- a/static/app/components/smartSearchBar/actionButton.tsx
+++ b/static/app/components/deprecatedSmartSearchBar/actionButton.tsx
@@ -1,4 +1,3 @@
-// eslint-disable-next-line no-restricted-imports
import styled from '@emotion/styled';
import {Button} from 'sentry/components/button';
diff --git a/static/app/components/smartSearchBar/index.spec.tsx b/static/app/components/deprecatedSmartSearchBar/index.spec.tsx
similarity index 89%
rename from static/app/components/smartSearchBar/index.spec.tsx
rename to static/app/components/deprecatedSmartSearchBar/index.spec.tsx
index fa0862cd53552c..b9a924d8796865 100644
--- a/static/app/components/smartSearchBar/index.spec.tsx
+++ b/static/app/components/deprecatedSmartSearchBar/index.spec.tsx
@@ -11,7 +11,7 @@ import {
waitFor,
} from 'sentry-test/reactTestingLibrary';
-import {SmartSearchBar} from 'sentry/components/smartSearchBar';
+import {DeprecatedSmartSearchBar} from 'sentry/components/deprecatedSmartSearchBar';
import TagStore from 'sentry/stores/tagStore';
import {FieldKey} from 'sentry/utils/fields';
@@ -68,7 +68,9 @@ describe('SmartSearchBar', function () {
.fn()
.mockResolvedValue(['this is filled with spaces']);
- render( );
+ render(
+
+ );
const textbox = screen.getByRole('textbox');
await userEvent.click(textbox);
@@ -86,7 +88,9 @@ describe('SmartSearchBar', function () {
.fn()
.mockResolvedValue(['this " is " filled " with " quotes']);
- render( );
+ render(
+
+ );
const textbox = screen.getByRole('textbox');
await userEvent.click(textbox);
@@ -102,7 +106,7 @@ describe('SmartSearchBar', function () {
it('does not search when pressing enter on a tag without a value', async function () {
const onSearchMock = jest.fn();
- render( );
+ render( );
const textbox = screen.getByRole('textbox');
await userEvent.type(textbox, 'browser:{enter}');
@@ -113,7 +117,7 @@ describe('SmartSearchBar', function () {
it('autocompletes value with tab', async function () {
const onSearchMock = jest.fn();
- render( );
+ render( );
const textbox = screen.getByRole('textbox');
await userEvent.type(textbox, 'bro');
@@ -138,7 +142,7 @@ describe('SmartSearchBar', function () {
it('autocompletes value with enter', async function () {
const onSearchMock = jest.fn();
- render( );
+ render( );
const textbox = screen.getByRole('textbox');
await userEvent.type(textbox, 'bro');
@@ -161,7 +165,7 @@ describe('SmartSearchBar', function () {
});
it('searches and completes tags with negation operator', async function () {
- render( );
+ render( );
const textbox = screen.getByRole('textbox');
await userEvent.type(textbox, '!bro');
@@ -175,43 +179,51 @@ describe('SmartSearchBar', function () {
describe('componentWillReceiveProps()', function () {
it('should add a space when setting query', function () {
- render( );
+ render( );
expect(screen.getByRole('textbox')).toHaveValue('one ');
});
it('updates query when prop changes', function () {
- const {rerender} = render( );
+ const {rerender} = render(
+
+ );
- rerender( );
+ rerender( );
expect(screen.getByRole('textbox')).toHaveValue('two ');
});
it('updates query when prop set to falsey value', function () {
- const {rerender} = render( );
+ const {rerender} = render(
+
+ );
- rerender( );
+ rerender( );
expect(screen.getByRole('textbox')).toHaveValue('');
});
it('should not reset user textarea if a noop props change happens', async function () {
- const {rerender} = render( );
+ const {rerender} = render(
+
+ );
await userEvent.type(screen.getByRole('textbox'), 'two');
- rerender( );
+ rerender( );
expect(screen.getByRole('textbox')).toHaveValue('one two');
});
it('should reset user textarea if a meaningful props change happens', async function () {
- const {rerender} = render( );
+ const {rerender} = render(
+
+ );
await userEvent.type(screen.getByRole('textbox'), 'two');
- rerender( );
+ rerender( );
expect(screen.getByRole('textbox')).toHaveValue('blah ');
});
@@ -222,7 +234,11 @@ describe('SmartSearchBar', function () {
const mockOnSearch = jest.fn();
render(
-
+
);
expect(screen.getByRole('textbox')).toHaveValue('is:unresolved ');
@@ -238,7 +254,7 @@ describe('SmartSearchBar', function () {
describe('dropdown open state', function () {
it('opens the dropdown when the search box is clicked', async function () {
- render( );
+ render( );
const textbox = screen.getByRole('textbox');
@@ -248,7 +264,7 @@ describe('SmartSearchBar', function () {
});
it('opens the dropdown when the search box gains focus', function () {
- render( );
+ render( );
const textbox = screen.getByRole('textbox');
@@ -260,7 +276,7 @@ describe('SmartSearchBar', function () {
it('hides the drop down when clicking outside', async function () {
render(
-
+
);
@@ -275,7 +291,7 @@ describe('SmartSearchBar', function () {
});
it('hides the drop down when pressing escape', async function () {
- render( );
+ render( );
const textbox = screen.getByRole('textbox');
@@ -291,7 +307,7 @@ describe('SmartSearchBar', function () {
describe('pasting', function () {
it('trims pasted content', async function () {
const mockOnChange = jest.fn();
- render( );
+ render( );
const textbox = screen.getByRole('textbox');
@@ -306,7 +322,9 @@ describe('SmartSearchBar', function () {
it('invokes onSearch() on enter', async function () {
const mockOnSearch = jest.fn();
- render( );
+ render(
+
+ );
await userEvent.type(screen.getByRole('textbox'), '{Enter}');
@@ -314,7 +332,7 @@ describe('SmartSearchBar', function () {
});
it('handles an empty query', function () {
- render( );
+ render( );
expect(screen.getByRole('textbox')).toHaveValue('');
});
@@ -323,7 +341,7 @@ describe('SmartSearchBar', function () {
const getTagValuesMock = jest.fn().mockResolvedValue([]);
render(
- );
+ render( );
// Should have three invalid tokens (tag:, is:, and has:)
expect(screen.getAllByTestId('filter-token-invalid')).toHaveLength(3);
@@ -429,7 +447,7 @@ describe('SmartSearchBar', function () {
it('renders nested keys correctly', async function () {
render(
-
+
);
const textbox = screen.getByRole('textbox');
@@ -528,7 +550,7 @@ describe('SmartSearchBar', function () {
const getTagValuesMock = jest.fn().mockResolvedValue(['Chrome', 'Firefox']);
render(
-
@@ -819,7 +841,7 @@ describe('SmartSearchBar', function () {
it('can delete a middle token', async function () {
render(
-
@@ -840,7 +862,7 @@ describe('SmartSearchBar', function () {
it('can exclude a token', async function () {
render(
-
@@ -861,7 +883,7 @@ describe('SmartSearchBar', function () {
it('can include a token', async function () {
render(
-
@@ -885,7 +907,7 @@ describe('SmartSearchBar', function () {
});
it('displays invalid field message', async function () {
- render( );
+ render( );
const textbox = screen.getByRole('textbox');
@@ -897,7 +919,7 @@ describe('SmartSearchBar', function () {
});
it('displays invalid field messages for when wildcard is disallowed', async function () {
- render( );
+ render( );
const textbox = screen.getByRole('textbox');
@@ -922,7 +944,7 @@ describe('SmartSearchBar', function () {
});
it('displays date picker dropdown when appropriate', async () => {
- render( );
+ render( );
const textbox = screen.getByRole('textbox');
await userEvent.click(textbox);
@@ -957,7 +979,7 @@ describe('SmartSearchBar', function () {
});
it('can select a suggested relative time value', async () => {
- render( );
+ render( );
await userEvent.type(screen.getByRole('textbox'), 'lastSeen:');
@@ -967,7 +989,7 @@ describe('SmartSearchBar', function () {
});
it('can select a specific date/time', async () => {
- render( );
+ render( );
await userEvent.type(screen.getByRole('textbox'), 'lastSeen:');
@@ -1007,7 +1029,7 @@ describe('SmartSearchBar', function () {
});
it('can change an existing datetime', async () => {
- render( );
+ render( );
const textbox = screen.getByRole('textbox');
fireEvent.change(textbox, {
@@ -1035,7 +1057,7 @@ describe('SmartSearchBar', function () {
});
it('populates the date picker correctly for date without time', async () => {
- render( );
+ render( );
const textbox = screen.getByRole('textbox');
@@ -1055,7 +1077,12 @@ describe('SmartSearchBar', function () {
});
it('populates the date picker correctly for date with time and no timezone', async () => {
- render( );
+ render(
+
+ );
const textbox = screen.getByRole('textbox');
@@ -1074,7 +1101,10 @@ describe('SmartSearchBar', function () {
it('populates the date picker correctly for date with time and timezone', async () => {
render(
-
+
);
const textbox = screen.getByRole('textbox');
@@ -1113,7 +1143,7 @@ describe('SmartSearchBar', function () {
it('displays a default group with custom wrapper', async function () {
const mockOnChange = jest.fn();
render(
-
+
);
const textbox = screen.getByRole('textbox');
@@ -1154,7 +1187,7 @@ describe('SmartSearchBar', function () {
it('hides the default group after picking item with applyFilter', async function () {
render(
- {
+/**
+ * @deprecated use SearchQueryBuilder instead
+ */
+class DeprecatedSmartSearchBar extends Component {
static defaultProps = {
id: 'smart-search-input',
includeLabel: true,
@@ -557,7 +560,7 @@ class SmartSearchBar extends Component {
return;
}
- const entry = entries[0];
+ const entry = entries[0]!;
const {width} = entry.contentRect;
const actionCount = this.props.actionBarItems?.length ?? 0;
@@ -646,11 +649,11 @@ class SmartSearchBar extends Component {
if (this.searchInput.current && filterTokens.length > 0) {
maybeFocusInput(this.searchInput.current);
- let offset = filterTokens[0].location.end.offset;
+ let offset = filterTokens[0]!.location.end.offset;
if (token) {
const tokenIndex = filterTokens.findIndex(tok => tok === token);
if (tokenIndex !== -1 && tokenIndex + 1 < filterTokens.length) {
- offset = filterTokens[tokenIndex + 1].location.end.offset;
+ offset = filterTokens[tokenIndex + 1]!.location.end.offset;
}
}
@@ -955,12 +958,12 @@ class SmartSearchBar extends Component {
: 0;
// Clear previous selection
- const prevItem = flatSearchItems[currIndex];
+ const prevItem = flatSearchItems[currIndex]!;
searchGroups = getSearchGroupWithItemMarkedActive(searchGroups, prevItem, false);
// Set new selection
const activeItem = flatSearchItems[nextActiveSearchItem];
- searchGroups = getSearchGroupWithItemMarkedActive(searchGroups, activeItem, true);
+ searchGroups = getSearchGroupWithItemMarkedActive(searchGroups, activeItem!, true);
this.setState({searchGroups, activeSearchItem: nextActiveSearchItem});
}
@@ -1052,7 +1055,7 @@ class SmartSearchBar extends Component {
if (isSelectingDropdownItems) {
searchGroups = getSearchGroupWithItemMarkedActive(
searchGroups,
- flatSearchItems[activeSearchItem],
+ flatSearchItems[activeSearchItem]!,
false
);
}
@@ -1200,13 +1203,13 @@ class SmartSearchBar extends Component {
const innerStart = cursorPosition - cursorToken.location.start.offset;
let tokenStart = innerStart;
- while (tokenStart > 0 && !LIMITER_CHARS.includes(cursorToken.text[tokenStart - 1])) {
+ while (tokenStart > 0 && !LIMITER_CHARS.includes(cursorToken.text[tokenStart - 1]!)) {
tokenStart--;
}
let tokenEnd = innerStart;
while (
tokenEnd < cursorToken.text.length &&
- !LIMITER_CHARS.includes(cursorToken.text[tokenEnd])
+ !LIMITER_CHARS.includes(cursorToken.text[tokenEnd]!)
) {
tokenEnd++;
}
@@ -2199,14 +2202,14 @@ class SmartSearchBarContainer extends Component {
render() {
// SmartSearchBar doesn't use members, but we forward it to cause a re-render.
- return ;
+ return ;
}
}
export default withApi(withSentryRouter(withOrganization(SmartSearchBarContainer)));
export type {Props as SmartSearchBarProps};
-export {SmartSearchBar};
+export {DeprecatedSmartSearchBar};
const Container = styled('div')<{inputHasFocus: boolean}>`
min-height: ${p => p.theme.form.md.height}px;
diff --git a/static/app/components/smartSearchBar/searchBarDatePicker.tsx b/static/app/components/deprecatedSmartSearchBar/searchBarDatePicker.tsx
similarity index 100%
rename from static/app/components/smartSearchBar/searchBarDatePicker.tsx
rename to static/app/components/deprecatedSmartSearchBar/searchBarDatePicker.tsx
diff --git a/static/app/components/smartSearchBar/searchDropdown.tsx b/static/app/components/deprecatedSmartSearchBar/searchDropdown.tsx
similarity index 98%
rename from static/app/components/smartSearchBar/searchDropdown.tsx
rename to static/app/components/deprecatedSmartSearchBar/searchDropdown.tsx
index 12c04d9ce380e9..7361ad146242a0 100644
--- a/static/app/components/smartSearchBar/searchDropdown.tsx
+++ b/static/app/components/deprecatedSmartSearchBar/searchDropdown.tsx
@@ -22,7 +22,7 @@ import {invalidTypes, ItemType} from './types';
const getDropdownItemKey = (item: SearchItem) =>
`${item.value || item.desc || item.title}-${
- item.children && item.children.length > 0 ? getDropdownItemKey(item.children[0]) : ''
+ item.children && item.children.length > 0 ? getDropdownItemKey(item.children[0]!) : ''
}`;
type Props = {
@@ -244,7 +244,7 @@ function ItemTitle({item, searchSubstring, isChild}: ItemTitleProps) {
if (searchSubstring) {
const idx =
restWords.length === 0
- ? fullWord.toLowerCase().indexOf(searchSubstring.split('.')[0])
+ ? fullWord.toLowerCase().indexOf(searchSubstring.split('.')[0]!)
: fullWord.toLowerCase().indexOf(searchSubstring);
// Below is the logic to make the current query bold inside the result.
@@ -253,14 +253,14 @@ function ItemTitle({item, searchSubstring, isChild}: ItemTitleProps) {
{!isFirstWordHidden && (
- {firstWord.slice(0, idx)}
- {firstWord.slice(idx, idx + searchSubstring.length)}
- {firstWord.slice(idx + searchSubstring.length)}
+ {firstWord!.slice(0, idx)}
+ {firstWord!.slice(idx, idx + searchSubstring.length)}
+ {firstWord!.slice(idx + searchSubstring.length)}
)}
{combinedRestWords && (
{names?.map(name => (
-
+
))}
diff --git a/static/app/components/devtoolbar/components/infiniteListItems.tsx b/static/app/components/devtoolbar/components/infiniteListItems.tsx
index 772c1372c2a9d4..c025eb230ec3ca 100644
--- a/static/app/components/devtoolbar/components/infiniteListItems.tsx
+++ b/static/app/components/devtoolbar/components/infiniteListItems.tsx
@@ -79,7 +79,7 @@ export default function InfiniteListItems({
? hasNextPage
? loadingMoreMessage()
: loadingCompleteMessage()
- : itemRenderer({item})}
+ : itemRenderer({item: item!})}
);
})}
diff --git a/static/app/components/devtoolbar/components/releases/releasesPanel.tsx b/static/app/components/devtoolbar/components/releases/releasesPanel.tsx
index 5215f1b341846e..6bc3ad5067c1a6 100644
--- a/static/app/components/devtoolbar/components/releases/releasesPanel.tsx
+++ b/static/app/components/devtoolbar/components/releases/releasesPanel.tsx
@@ -53,7 +53,7 @@ function getCrashFreeRate(data: ApiResult): number {
// assume it is 100%.
// round to 2 decimal points
return parseFloat(
- ((data?.json.groups[0].totals['crash_free_rate(session)'] ?? 1) * 100).toFixed(2)
+ ((data?.json.groups[0]!.totals['crash_free_rate(session)'] ?? 1) * 100).toFixed(2)
);
}
@@ -101,7 +101,7 @@ function ReleaseSummary({orgSlug, release}: {orgSlug: string; release: Release})
{formatVersion(release.version)}
@@ -244,14 +244,14 @@ export default function ReleasesPanel() {
) : (
-
+
1 ? releaseData.json[1].version : undefined
+ releaseData.json.length > 1 ? releaseData.json[1]!.version : undefined
}
/>
-
+
)}
diff --git a/static/app/components/devtoolbar/hooks/useReplayRecorder.tsx b/static/app/components/devtoolbar/hooks/useReplayRecorder.tsx
index 70d1be99034da1..d88b66437c3631 100644
--- a/static/app/components/devtoolbar/hooks/useReplayRecorder.tsx
+++ b/static/app/components/devtoolbar/hooks/useReplayRecorder.tsx
@@ -1,6 +1,6 @@
import {useCallback, useEffect, useState} from 'react';
+import type {ReplayRecordingMode} from '@sentry/core';
import type {replayIntegration} from '@sentry/react';
-import type {ReplayRecordingMode} from '@sentry/types';
import useConfiguration from 'sentry/components/devtoolbar/hooks/useConfiguration';
import {useSessionStorage} from 'sentry/utils/useSessionStorage';
diff --git a/static/app/components/devtoolbar/hooks/useSentryClientAndScope.tsx b/static/app/components/devtoolbar/hooks/useSentryClientAndScope.tsx
index b95f283e1c4dd7..3c492995693b36 100644
--- a/static/app/components/devtoolbar/hooks/useSentryClientAndScope.tsx
+++ b/static/app/components/devtoolbar/hooks/useSentryClientAndScope.tsx
@@ -1,4 +1,4 @@
-import type {Client, Scope} from '@sentry/types';
+import type {Client, Scope} from '@sentry/core';
type V8Carrier = {
stack: {
diff --git a/static/app/components/discover/quickContextCommitRow.spec.tsx b/static/app/components/discover/quickContextCommitRow.spec.tsx
index e92d0f46927cc1..54a1615eba6bdd 100644
--- a/static/app/components/discover/quickContextCommitRow.spec.tsx
+++ b/static/app/components/discover/quickContextCommitRow.spec.tsx
@@ -76,7 +76,7 @@ describe('Quick Context Commit Row', () => {
const pullRequestLink = screen.getByText(
/feat\(quick-context-commit-row\): Added new component/
);
- expect(screen.queryByTestId('quick-context-commit-row-pr-link')).toBeInTheDocument();
+ expect(screen.getByTestId('quick-context-commit-row-pr-link')).toBeInTheDocument();
expect(pullRequestLink).toBeInTheDocument();
expect(pullRequestLink).toHaveAttribute(
'href',
diff --git a/static/app/components/discover/transactionsTable.tsx b/static/app/components/discover/transactionsTable.tsx
index 756d46557ef509..7fbd410dde4174 100644
--- a/static/app/components/discover/transactionsTable.tsx
+++ b/static/app/components/discover/transactionsTable.tsx
@@ -1,4 +1,4 @@
-import {Fragment, PureComponent} from 'react';
+import {Fragment} from 'react';
import styled from '@emotion/styled';
import type {Location, LocationDescriptor} from 'history';
@@ -51,21 +51,32 @@ type Props = {
titles?: string[];
};
-class TransactionsTable extends PureComponent {
- getTitles() {
- const {eventView, titles} = this.props;
- return titles ?? eventView.getFields();
- }
+function TransactionsTable(props: Props) {
+ const {
+ eventView,
+ titles,
+ tableData,
+ columnOrder,
+ organization,
+ location,
+ generateLink,
+ handleCellAction,
+ useAggregateAlias,
+ isLoading,
+ referrer,
+ } = props;
- renderHeader() {
- const {tableData, columnOrder} = this.props;
+ const getTitles = () => {
+ return titles ?? eventView.getFields();
+ };
+ const renderHeader = () => {
const tableMeta = tableData?.meta;
const generateSortLink = () => undefined;
- const tableTitles = this.getTitles();
+ const tableTitles = getTitles();
const headers = tableTitles.map((title, index) => {
- const column = columnOrder[index];
+ const column = columnOrder[index]!;
const align: Alignments = fieldAlignment(column.name, column.type, tableMeta);
if (column.key === 'span_ops_breakdown.relative') {
@@ -111,32 +122,22 @@ class TransactionsTable extends PureComponent {
});
return headers;
- }
+ };
- renderRow(
+ const renderRow = (
row: TableDataRow,
rowIndex: number,
- columnOrder: TableColumn[],
+ colOrder: TableColumn[],
tableMeta: MetaType
- ): React.ReactNode[] {
- const {
- eventView,
- organization,
- location,
- generateLink,
- handleCellAction,
- titles,
- useAggregateAlias,
- referrer,
- } = this.props;
+ ): React.ReactNode[] => {
const fields = eventView.getFields();
if (titles?.length) {
// Slice to match length of given titles
- columnOrder = columnOrder.slice(0, titles.length);
+ colOrder = colOrder.slice(0, titles.length);
}
- const resultsRow = columnOrder.map((column, index) => {
+ const resultsRow = colOrder.map((column, index) => {
const field = String(column.key);
// TODO add a better abstraction for this in fieldRenderers.
const fieldName = useAggregateAlias ? getAggregateAlias(field) : field;
@@ -162,7 +163,7 @@ class TransactionsTable extends PureComponent {
} else if (target && !isEmptyObject(target)) {
if (fields[index] === 'replayId') {
rendered = (
-
+
{rendered}
);
@@ -199,10 +200,9 @@ class TransactionsTable extends PureComponent {
});
return resultsRow;
- }
+ };
- renderResults() {
- const {isLoading, tableData, columnOrder} = this.props;
+ const renderResults = () => {
let cells: React.ReactNode[] = [];
if (isLoading) {
@@ -217,39 +217,35 @@ class TransactionsTable extends PureComponent {
if (!tableData.meta) {
return;
}
- cells = cells.concat(this.renderRow(row, i, columnOrder, tableData.meta));
+ cells = cells.concat(renderRow(row, i, columnOrder, tableData.meta));
});
return cells;
- }
-
- render() {
- const {isLoading, tableData} = this.props;
+ };
- const hasResults = tableData?.meta && tableData.data?.length > 0;
+ const hasResults = tableData?.meta && tableData.data?.length > 0;
- // Custom set the height so we don't have layout shift when results are loaded.
- const loader = ;
+ // Custom set the height so we don't have layout shift when results are loaded.
+ const loader = ;
- return (
-
+
-
- {this.renderResults()}
-
-
- );
- }
+ {renderResults()}
+
+
+ );
}
function getProfileAnalyticsHandler(organization: Organization, referrer?: string) {
diff --git a/static/app/components/draggableTabs/draggableTabList.tsx b/static/app/components/draggableTabs/draggableTabList.tsx
index b672cedb645fec..c921768f2d4d5e 100644
--- a/static/app/components/draggableTabs/draggableTabList.tsx
+++ b/static/app/components/draggableTabs/draggableTabList.tsx
@@ -23,7 +23,6 @@ import {motion, Reorder} from 'framer-motion';
import {Button} from 'sentry/components/button';
import {CompactSelect} from 'sentry/components/compactSelect';
import DropdownButton from 'sentry/components/dropdownButton';
-import {TabsContext} from 'sentry/components/tabs';
import {type BaseTabProps, Tab} from 'sentry/components/tabs/tab';
import {IconAdd, IconEllipsis} from 'sentry/icons';
import {t} from 'sentry/locale';
@@ -34,6 +33,7 @@ import {useDimensions} from 'sentry/utils/useDimensions';
import {useDimensionsMultiple} from 'sentry/utils/useDimensionsMultiple';
import {useNavigate} from 'sentry/utils/useNavigate';
import useOrganization from 'sentry/utils/useOrganization';
+import {IssueViewsContext} from 'sentry/views/issueList/groupSearchViewTabs/issueViews';
import type {DraggableTabListItemProps} from './item';
import {Item} from './item';
@@ -62,9 +62,9 @@ function useOverflowingTabs({state}: {state: TabListState[] = [];
for (let i = 0; i < tabsDimensions.length; i++) {
- totalWidth += tabsDimensions[i].width + 1; // 1 extra pixel for the divider
+ totalWidth += tabsDimensions[i]!.width + 1; // 1 extra pixel for the divider
if (totalWidth > availableWidth + 1) {
- overflowing.push(persistentTabs[i]);
+ overflowing.push(persistentTabs[i]!);
}
}
@@ -273,7 +273,7 @@ function BaseDraggableTabList({
}: BaseDraggableTabListProps) {
const navigate = useNavigate();
const [hoveringKey, setHoveringKey] = useState(null);
- const {rootProps, setTabListState} = useContext(TabsContext);
+ const {rootProps, setTabListState} = useContext(IssueViewsContext);
const organization = useOrganization();
const {
value,
diff --git a/static/app/components/dropdownAutoComplete/list.tsx b/static/app/components/dropdownAutoComplete/list.tsx
index 275762171ec8b8..37e43b5342fdb3 100644
--- a/static/app/components/dropdownAutoComplete/list.tsx
+++ b/static/app/components/dropdownAutoComplete/list.tsx
@@ -82,7 +82,7 @@ function List({
onScroll={onScroll}
rowCount={items.length}
rowHeight={({index}) =>
- items[index].groupLabel && virtualizedLabelHeight
+ items[index]!.groupLabel && virtualizedLabelHeight
? virtualizedLabelHeight
: virtualizedHeight
}
@@ -90,8 +90,8 @@ function List({
)}
diff --git a/static/app/components/dropdownMenu/index.spec.tsx b/static/app/components/dropdownMenu/index.spec.tsx
index 853e38b0d2a8ec..ad7d20d1a6e30c 100644
--- a/static/app/components/dropdownMenu/index.spec.tsx
+++ b/static/app/components/dropdownMenu/index.spec.tsx
@@ -319,7 +319,6 @@ describe('DropdownMenu', function () {
// JSDOM throws an error on navigation to random urls
expect(errorSpy).toHaveBeenCalledTimes(1);
- // eslint-disable-next-line no-console
errorSpy.mockRestore();
});
});
diff --git a/static/app/components/emptyStateWarning.stories.tsx b/static/app/components/emptyStateWarning.stories.tsx
new file mode 100644
index 00000000000000..ee5373727667be
--- /dev/null
+++ b/static/app/components/emptyStateWarning.stories.tsx
@@ -0,0 +1,54 @@
+import {Fragment} from 'react';
+import styled from '@emotion/styled';
+
+import EmptyStateWarning from 'sentry/components/emptyStateWarning';
+import JSXProperty from 'sentry/components/stories/jsxProperty';
+import storyBook from 'sentry/stories/storyBook';
+import {space} from 'sentry/styles/space';
+
+export default storyBook(EmptyStateWarning, story => {
+ story('Default', () => (
+
+
+ The default EmptyStateWarning
looks like this, with a large icon.
+
+
+
+ ));
+
+ story('Props', () => (
+
+
+ You can also pass in several props:{' '}
+
+
+ small
determines the icon size.
+
+
+ withIcon
hides the icon if set to false
.
+
+ You can also pass in children in combination with icon or no icon.
+
+
+
+
+
+
+
+ with children
+
+ No results found.
+
+ with children (with styling)
+
+ No results found.
+
+ ));
+});
+
+const StyledEmptyStateWarning = styled(EmptyStateWarning)`
+ display: flex;
+ flex-direction: column;
+ gap: ${space(2)};
+ align-items: center;
+`;
diff --git a/static/app/components/events/autofix/autofixActionSelector.tsx b/static/app/components/events/autofix/autofixActionSelector.tsx
new file mode 100644
index 00000000000000..0cf7822f8e2d38
--- /dev/null
+++ b/static/app/components/events/autofix/autofixActionSelector.tsx
@@ -0,0 +1,103 @@
+import styled from '@emotion/styled';
+import {AnimatePresence, motion} from 'framer-motion';
+
+import {Button} from 'sentry/components/button';
+import ButtonBar from 'sentry/components/buttonBar';
+import {IconArrow} from 'sentry/icons';
+import {t} from 'sentry/locale';
+import {space} from 'sentry/styles/space';
+import testableTransition from 'sentry/utils/testableTransition';
+
+interface Option {
+ key: T;
+ label: string;
+ active?: boolean;
+}
+
+interface Props {
+ children: (selectedOption: Option) => React.ReactNode;
+ onBack: () => void;
+ onSelect: (value: T) => void;
+ options: Option[];
+ selected: T | null;
+}
+
+function AutofixActionSelector({
+ options,
+ selected,
+ onSelect,
+ onBack,
+ children,
+}: Props) {
+ const selectedOption = options.find(opt => opt.key === selected);
+
+ return (
+
+
+ {!selected ? (
+
+
+ {options.map(option => (
+ onSelect(option.key)}
+ >
+ {option.label}
+
+ ))}
+
+
+ ) : (
+
+
+ }
+ onClick={onBack}
+ title={t('Back to options')}
+ aria-label={t('Back to options')}
+ />
+ {selectedOption && children(selectedOption)}
+
+
+ )}
+
+
+ );
+}
+
+const Container = styled('div')`
+ min-height: 40px;
+`;
+
+const ContentWrapper = styled('div')`
+ display: flex;
+ align-items: center;
+ gap: ${space(1)};
+`;
+
+const BackButton = styled(Button)`
+ flex-shrink: 0;
+ height: 40px;
+`;
+
+const ContentArea = styled('div')`
+ flex-grow: 1;
+`;
+
+export default AutofixActionSelector;
diff --git a/static/app/components/events/autofix/autofixChanges.tsx b/static/app/components/events/autofix/autofixChanges.tsx
index 223b0a9790d8a8..014e76a2a5c8cf 100644
--- a/static/app/components/events/autofix/autofixChanges.tsx
+++ b/static/app/components/events/autofix/autofixChanges.tsx
@@ -49,10 +49,24 @@ function AutofixRepoChange({
}
const cardAnimationProps: AnimationProps = {
- exit: {opacity: 0},
- initial: {opacity: 0, y: 20},
- animate: {opacity: 1, y: 0},
- transition: testableTransition({duration: 0.3}),
+ exit: {opacity: 0, height: 0, scale: 0.8, y: -20},
+ initial: {opacity: 0, height: 0, scale: 0.8},
+ animate: {opacity: 1, height: 'auto', scale: 1},
+ transition: testableTransition({
+ duration: 1.0,
+ height: {
+ type: 'spring',
+ bounce: 0.2,
+ },
+ scale: {
+ type: 'spring',
+ bounce: 0.2,
+ },
+ y: {
+ type: 'tween',
+ ease: 'easeOut',
+ },
+ }),
};
export function AutofixChanges({step, groupId, runId}: AutofixChangesProps) {
@@ -116,7 +130,9 @@ const PreviewContent = styled('div')`
margin-top: ${space(2)};
`;
-const AnimationWrapper = styled(motion.div)``;
+const AnimationWrapper = styled(motion.div)`
+ transform-origin: top center;
+`;
const PrefixText = styled('span')``;
diff --git a/static/app/components/events/autofix/autofixDiff.spec.tsx b/static/app/components/events/autofix/autofixDiff.spec.tsx
index 89b603613c2089..b381b71f368d92 100644
--- a/static/app/components/events/autofix/autofixDiff.spec.tsx
+++ b/static/app/components/events/autofix/autofixDiff.spec.tsx
@@ -125,8 +125,8 @@ describe('AutofixDiff', function () {
await waitFor(() => {
expect(screen.queryByTestId('line-added')).not.toBeInTheDocument();
- expect(screen.queryByTestId('line-removed')).not.toBeInTheDocument();
});
+ expect(screen.queryByTestId('line-removed')).not.toBeInTheDocument();
});
it('shows error message on failed edit', async function () {
diff --git a/static/app/components/events/autofix/autofixDiff.tsx b/static/app/components/events/autofix/autofixDiff.tsx
index c076e645adeed3..d69b92e88c56e8 100644
--- a/static/app/components/events/autofix/autofixDiff.tsx
+++ b/static/app/components/events/autofix/autofixDiff.tsx
@@ -43,7 +43,7 @@ function makeTestIdFromLineType(lineType: DiffLineType) {
function addChangesToDiffLines(lines: DiffLineWithChanges[]): DiffLineWithChanges[] {
for (let i = 0; i < lines.length; i++) {
- const line = lines[i];
+ const line = lines[i]!;
if (line.line_type === DiffLineType.CONTEXT) {
continue;
}
@@ -293,7 +293,7 @@ function DiffHunkContent({
// Update diff_line_no for all lines after the insertion
for (let i = insertionIndex + newAddedLines.length; i < updatedLines.length; i++) {
- updatedLines[i].diff_line_no = ++lastDiffLineNo;
+ updatedLines[i]!.diff_line_no = ++lastDiffLineNo;
}
updateHunk.mutate({hunkIndex, lines: updatedLines, repoId, fileName});
@@ -332,15 +332,15 @@ function DiffHunkContent({
};
const getStartLineNumber = (index: number, lineType: DiffLineType) => {
- const line = linesWithChanges[index];
+ const line = linesWithChanges[index]!;
if (lineType === DiffLineType.REMOVED) {
return line.source_line_no;
}
if (lineType === DiffLineType.ADDED) {
// Find the first non-null target_line_no
for (let i = index; i < linesWithChanges.length; i++) {
- if (linesWithChanges[i].target_line_no !== null) {
- return linesWithChanges[i].target_line_no;
+ if (linesWithChanges[i]!.target_line_no !== null) {
+ return linesWithChanges[i]!.target_line_no;
}
}
}
diff --git a/static/app/components/events/autofix/autofixFeedback.tsx b/static/app/components/events/autofix/autofixFeedback.tsx
index 1d6010fed3b065..32e9bdc2445ea8 100644
--- a/static/app/components/events/autofix/autofixFeedback.tsx
+++ b/static/app/components/events/autofix/autofixFeedback.tsx
@@ -1,4 +1,5 @@
import {useRef} from 'react';
+import styled from '@emotion/styled';
import {Button} from 'sentry/components/button';
import {IconMegaphone} from 'sentry/icons/iconMegaphone';
@@ -14,9 +15,10 @@ function AutofixFeedback() {
}
return (
- }
onClick={() =>
openForm({
@@ -29,8 +31,16 @@ function AutofixFeedback() {
}
>
{t('Give Feedback')}
-
+
);
}
+const StyledButton = styled(Button)`
+ padding: 0;
+ margin: 0;
+ font-size: ${p => p.theme.fontSizeSmall};
+ font-weight: ${p => p.theme.fontWeightNormal};
+ color: ${p => p.theme.subText};
+`;
+
export default AutofixFeedback;
diff --git a/static/app/components/events/autofix/autofixInsightCards.spec.tsx b/static/app/components/events/autofix/autofixInsightCards.spec.tsx
index 864c4f89462e14..8ee14f7e5cee1e 100644
--- a/static/app/components/events/autofix/autofixInsightCards.spec.tsx
+++ b/static/app/components/events/autofix/autofixInsightCards.spec.tsx
@@ -122,13 +122,6 @@ describe('AutofixInsightCards', () => {
expect(userMessage.closest('div')).toHaveStyle('color: inherit');
});
- it('renders "No insights yet" message when there are no insights', () => {
- renderComponent({insights: []});
- expect(
- screen.getByText(/Autofix will share its discoveries here./)
- ).toBeInTheDocument();
- });
-
it('toggles context expansion correctly', async () => {
renderComponent();
const contextButton = screen.getByText('Sample insight 1');
diff --git a/static/app/components/events/autofix/autofixInsightCards.tsx b/static/app/components/events/autofix/autofixInsightCards.tsx
index 52a896e384f74a..f8f0db2d09557a 100644
--- a/static/app/components/events/autofix/autofixInsightCards.tsx
+++ b/static/app/components/events/autofix/autofixInsightCards.tsx
@@ -53,8 +53,8 @@ function AutofixBreadcrumbSnippet({breadcrumb}: AutofixBreadcrumbSnippetProps) {
const rawCrumb = {
message: breadcrumb.body,
category: breadcrumb.category,
- type: type,
- level: level,
+ type,
+ level,
};
return (
@@ -123,10 +123,24 @@ export function ExpandableInsightContext({
}
const animationProps: AnimationProps = {
- exit: {opacity: 0},
- initial: {opacity: 0, y: 20},
- animate: {opacity: 1, y: 0},
- transition: testableTransition({duration: 0.3}),
+ exit: {opacity: 0, height: 0, scale: 0.8, y: -20},
+ initial: {opacity: 0, height: 0, scale: 0.8},
+ animate: {opacity: 1, height: 'auto', scale: 1},
+ transition: testableTransition({
+ duration: 1.0,
+ height: {
+ type: 'spring',
+ bounce: 0.2,
+ },
+ scale: {
+ type: 'spring',
+ bounce: 0.2,
+ },
+ y: {
+ type: 'tween',
+ ease: 'easeOut',
+ },
+ }),
};
interface AutofixInsightCardProps {
@@ -348,15 +362,7 @@ function AutofixInsightCards({
)
)
) : stepIndex === 0 && !hasStepBelow ? (
-
- Autofix will share its discoveries here.
-
- Autofix is like an AI rubber ducky to help you debug your code.
-
- Collaborate with it and share your own knowledge and opinions for the best
- results.
-
-
+
) : hasStepBelow ? (
p.theme.subText};
- padding-top: ${space(4)};
`;
const EmptyResultsContainer = styled('div')`
@@ -611,6 +613,18 @@ const InsightContainer = styled(motion.div)`
box-shadow: ${p => p.theme.dropShadowMedium};
margin-left: ${space(2)};
margin-right: ${space(2)};
+ animation: fadeFromActive 1.2s ease-out;
+
+ @keyframes fadeFromActive {
+ from {
+ background-color: ${p => p.theme.active};
+ border-color: ${p => p.theme.active};
+ }
+ to {
+ background-color: ${p => p.theme.background};
+ border-color: ${p => p.theme.innerBorder};
+ }
+ }
`;
const ArrowContainer = styled('div')`
@@ -789,7 +803,22 @@ const StyledStructuredEventData = styled(StructuredEventData)`
border-top-right-radius: 0;
`;
-const AnimationWrapper = styled(motion.div)``;
+const AnimationWrapper = styled(motion.div)`
+ transform-origin: top center;
+
+ &.new-insight {
+ animation: textFadeFromActive 1.2s ease-out;
+ }
+
+ @keyframes textFadeFromActive {
+ from {
+ color: ${p => p.theme.white};
+ }
+ to {
+ color: inherit;
+ }
+ }
+`;
const StyledIconChevron = styled(IconChevron)`
width: 5%;
diff --git a/static/app/components/events/autofix/autofixMessageBox.analytics.spec.tsx b/static/app/components/events/autofix/autofixMessageBox.analytics.spec.tsx
index 54d1ed27006d71..90c0902494c002 100644
--- a/static/app/components/events/autofix/autofixMessageBox.analytics.spec.tsx
+++ b/static/app/components/events/autofix/autofixMessageBox.analytics.spec.tsx
@@ -45,12 +45,14 @@ describe('AutofixMessageBox Analytics', () => {
mockButton.mockClear();
});
- it('passes correct analytics props for suggested root cause without instructions', () => {
+ it('passes correct analytics props for suggested root cause without instructions', async () => {
const onSendMock = jest.fn();
render(
);
+ await userEvent.click(screen.getByRole('button', {name: 'Use suggested root cause'}));
+
expect(mockButton).toHaveBeenLastCalledWith(
expect.objectContaining({
analyticsEventKey: 'autofix.create_fix_clicked',
@@ -70,6 +72,8 @@ describe('AutofixMessageBox Analytics', () => {
);
+ await userEvent.click(screen.getByRole('button', {name: 'Use suggested root cause'}));
+
const input = screen.getByPlaceholderText(
'(Optional) Provide any instructions for the fix...'
);
@@ -94,7 +98,7 @@ describe('AutofixMessageBox Analytics', () => {
);
- await userEvent.click(screen.getAllByText('Propose your own root cause')[0]);
+ await userEvent.click(screen.getAllByText('Propose your own root cause')[0]!);
const customInput = screen.getByPlaceholderText('Propose your own root cause...');
await userEvent.type(customInput, 'Custom root cause');
@@ -126,7 +130,7 @@ describe('AutofixMessageBox Analytics', () => {
render( );
- await userEvent.click(screen.getByRole('radio', {name: 'Approve changes'}));
+ await userEvent.click(screen.getByRole('button', {name: 'Approve'}));
// Find the last call to Button that matches our Create PR button
const createPRButtonCall = mockButton.mock.calls.find(
@@ -156,7 +160,7 @@ describe('AutofixMessageBox Analytics', () => {
render( );
- await userEvent.click(screen.getByRole('radio', {name: 'Approve changes'}));
+ await userEvent.click(screen.getByRole('button', {name: 'Approve'}));
// Find the last call to Button that matches our Setup button
const setupButtonCall = mockButton.mock.calls.find(
diff --git a/static/app/components/events/autofix/autofixMessageBox.spec.tsx b/static/app/components/events/autofix/autofixMessageBox.spec.tsx
index 1954cd41683551..e0a249c45316af 100644
--- a/static/app/components/events/autofix/autofixMessageBox.spec.tsx
+++ b/static/app/components/events/autofix/autofixMessageBox.spec.tsx
@@ -74,6 +74,15 @@ describe('AutofixMessageBox', () => {
(addSuccessMessage as jest.Mock).mockClear();
(addErrorMessage as jest.Mock).mockClear();
MockApiClient.clearMockResponses();
+
+ MockApiClient.addMockResponse({
+ url: '/issues/123/autofix/setup/?check_write_access=true',
+ method: 'GET',
+ body: {
+ genAIConsent: {ok: true},
+ integration: {ok: true},
+ },
+ });
});
it('renders correctly with default props', () => {
@@ -81,7 +90,7 @@ describe('AutofixMessageBox', () => {
expect(screen.getByText('Test display text')).toBeInTheDocument();
expect(
- screen.getByPlaceholderText('Share helpful context or feedback...')
+ screen.getByPlaceholderText('Share helpful context or directions...')
).toBeInTheDocument();
expect(screen.getByRole('button', {name: 'Send'})).toBeInTheDocument();
});
@@ -90,7 +99,7 @@ describe('AutofixMessageBox', () => {
const onSendMock = jest.fn();
render( );
- const input = screen.getByPlaceholderText('Share helpful context or feedback...');
+ const input = screen.getByPlaceholderText('Share helpful context or directions...');
await userEvent.type(input, 'Test message');
await userEvent.click(screen.getByRole('button', {name: 'Send'}));
@@ -106,7 +115,7 @@ describe('AutofixMessageBox', () => {
render( );
- const input = screen.getByPlaceholderText('Share helpful context or feedback...');
+ const input = screen.getByPlaceholderText('Share helpful context or directions...');
await userEvent.type(input, 'Test message');
await userEvent.click(screen.getByRole('button', {name: 'Send'}));
@@ -127,7 +136,7 @@ describe('AutofixMessageBox', () => {
render( );
- const input = screen.getByPlaceholderText('Share helpful context or feedback...');
+ const input = screen.getByPlaceholderText('Share helpful context or directions...');
await userEvent.type(input, 'Test message');
await userEvent.click(screen.getByRole('button', {name: 'Send'}));
@@ -164,6 +173,7 @@ describe('AutofixMessageBox', () => {
);
// Test suggested root cause
+ await userEvent.click(screen.getByRole('button', {name: 'Use suggested root cause'}));
const input = screen.getByPlaceholderText(
'(Optional) Provide any instructions for the fix...'
);
@@ -180,7 +190,9 @@ describe('AutofixMessageBox', () => {
);
// Test custom root cause
- await userEvent.click(screen.getAllByText('Propose your own root cause')[0]);
+ await userEvent.click(
+ screen.getByRole('button', {name: 'Propose your own root cause'})
+ );
const customInput = screen.getByPlaceholderText('Propose your own root cause...');
await userEvent.type(customInput, 'Custom root cause');
await userEvent.click(screen.getByRole('button', {name: 'Send'}));
@@ -191,20 +203,23 @@ describe('AutofixMessageBox', () => {
it('renders segmented control for changes step', () => {
render( );
- expect(screen.getByRole('radio', {name: 'Give feedback'})).toBeInTheDocument();
- expect(screen.getByRole('radio', {name: 'Approve changes'})).toBeInTheDocument();
+ expect(screen.getByRole('button', {name: 'Iterate'})).toBeInTheDocument();
+ expect(screen.getByRole('button', {name: 'Approve'})).toBeInTheDocument();
+ expect(screen.getByRole('button', {name: 'Add tests'})).toBeInTheDocument();
});
- it('shows feedback input when "Give feedback" is selected', () => {
+ it('shows feedback input when "Iterate" is selected', async () => {
render( );
+ await userEvent.click(screen.getByRole('button', {name: 'Iterate'}));
+
expect(
- screen.getByPlaceholderText('Share helpful context or feedback...')
+ screen.getByPlaceholderText('Share helpful context or directions...')
).toBeInTheDocument();
expect(screen.getByRole('button', {name: 'Send'})).toBeInTheDocument();
});
- it('shows "Create PR" button when "Approve changes" is selected', async () => {
+ it('shows "Create PR" button when "Approve" is selected', async () => {
MockApiClient.addMockResponse({
url: '/issues/123/autofix/setup/?check_write_access=true',
method: 'GET',
@@ -219,7 +234,7 @@ describe('AutofixMessageBox', () => {
render( );
- await userEvent.click(screen.getByRole('radio', {name: 'Approve changes'}));
+ await userEvent.click(screen.getByRole('button', {name: 'Approve'}));
expect(
screen.getByText('Draft 1 pull request for the above changes?')
@@ -250,7 +265,7 @@ describe('AutofixMessageBox', () => {
render( );
- await userEvent.click(screen.getByRole('radio', {name: 'Approve changes'}));
+ await userEvent.click(screen.getByRole('button', {name: 'Approve'}));
expect(
screen.getByText('Draft 2 pull requests for the above changes?')
@@ -308,7 +323,7 @@ describe('AutofixMessageBox', () => {
render( );
- await userEvent.click(screen.getByRole('radio', {name: 'Approve changes'}));
+ await userEvent.click(screen.getByRole('button', {name: 'Approve'}));
expect(
screen.getByText('Draft 1 pull request for the above changes?')
@@ -326,18 +341,18 @@ describe('AutofixMessageBox', () => {
).toBeInTheDocument();
});
- it('shows segmented control with "Add tests" option for changes step', () => {
+ it('shows segmented control options for changes step', () => {
render( );
- expect(screen.getByRole('radio', {name: 'Give feedback'})).toBeInTheDocument();
- expect(screen.getByRole('radio', {name: 'Add tests'})).toBeInTheDocument();
- expect(screen.getByRole('radio', {name: 'Approve changes'})).toBeInTheDocument();
+ expect(screen.getByRole('button', {name: 'Approve'})).toBeInTheDocument();
+ expect(screen.getByRole('button', {name: 'Iterate'})).toBeInTheDocument();
+ expect(screen.getByRole('button', {name: 'Add tests'})).toBeInTheDocument();
});
- it('shows "Add Tests" button and static message when "Add tests" is selected', async () => {
+ it('shows "Test" button and static message when "Test" is selected', async () => {
render( );
- await userEvent.click(screen.getByRole('radio', {name: 'Add tests'}));
+ await userEvent.click(screen.getByRole('button', {name: 'Add tests'}));
expect(
screen.getByText('Write unit tests to make sure the issue is fixed?')
@@ -345,7 +360,7 @@ describe('AutofixMessageBox', () => {
expect(screen.getByRole('button', {name: 'Add Tests'})).toBeInTheDocument();
});
- it('sends correct message when "Add Tests" is clicked without onSend prop', async () => {
+ it('sends correct message when "Test" is clicked without onSend prop', async () => {
MockApiClient.addMockResponse({
method: 'POST',
url: '/issues/123/autofix/update/',
@@ -354,7 +369,7 @@ describe('AutofixMessageBox', () => {
render( );
- await userEvent.click(screen.getByRole('radio', {name: 'Add tests'}));
+ await userEvent.click(screen.getByRole('button', {name: 'Add tests'}));
await userEvent.click(screen.getByRole('button', {name: 'Add Tests'}));
await waitFor(() => {
diff --git a/static/app/components/events/autofix/autofixMessageBox.tsx b/static/app/components/events/autofix/autofixMessageBox.tsx
index 62ac0ca81ad643..fe8106687d5ba8 100644
--- a/static/app/components/events/autofix/autofixMessageBox.tsx
+++ b/static/app/components/events/autofix/autofixMessageBox.tsx
@@ -5,6 +5,8 @@ import {AnimatePresence, type AnimationProps, motion} from 'framer-motion';
import {addErrorMessage, addSuccessMessage} from 'sentry/actionCreators/indicator';
import {openModal} from 'sentry/actionCreators/modal';
import {Button, LinkButton} from 'sentry/components/button';
+import AutofixActionSelector from 'sentry/components/events/autofix/autofixActionSelector';
+import AutofixFeedback from 'sentry/components/events/autofix/autofixFeedback';
import {AutofixSetupWriteAccessModal} from 'sentry/components/events/autofix/autofixSetupWriteAccessModal';
import {
type AutofixCodebaseChange,
@@ -20,13 +22,12 @@ import {useAutofixSetup} from 'sentry/components/events/autofix/useAutofixSetup'
import Input from 'sentry/components/input';
import LoadingIndicator from 'sentry/components/loadingIndicator';
import {ScrollCarousel} from 'sentry/components/scrollCarousel';
-import {SegmentedControl} from 'sentry/components/segmentedControl';
import {
+ IconChat,
IconCheckmark,
IconChevron,
IconClose,
IconFatal,
- IconFocus,
IconOpen,
IconSad,
} from 'sentry/icons';
@@ -178,14 +179,14 @@ function SetupAndCreatePRsButton({
interface RootCauseAndFeedbackInputAreaProps {
actionText: string;
- changesMode: 'give_feedback' | 'add_tests' | 'create_prs';
+ changesMode: 'give_feedback' | 'add_tests' | 'create_prs' | null;
groupId: string;
handleSend: (e: FormEvent) => void;
isRootCauseSelectionStep: boolean;
message: string;
primaryAction: boolean;
responseRequired: boolean;
- rootCauseMode: 'suggested_root_cause' | 'custom_root_cause';
+ rootCauseMode: 'suggested_root_cause' | 'custom_root_cause' | null;
setMessage: (message: string) => void;
}
@@ -212,7 +213,7 @@ function RootCauseAndFeedbackInputArea({
onChange={e => setMessage(e.target.value)}
placeholder={
!isRootCauseSelectionStep
- ? 'Share helpful context or feedback...'
+ ? 'Share helpful context or directions...'
: rootCauseMode === 'suggested_root_cause'
? '(Optional) Provide any instructions for the fix...'
: 'Propose your own root cause...'
@@ -280,23 +281,19 @@ function StepIcon({step}: {step: AutofixStep}) {
if (step.changes.every(change => change.pull_request)) {
return ;
}
- return ;
+ return null;
}
if (step.type === AutofixStepType.ROOT_CAUSE_ANALYSIS) {
if (step.causes?.length === 0) {
return ;
}
- return step.selection ? (
-
- ) : (
-
- );
+ return step.selection ? : null;
}
switch (step.status) {
case AutofixStatus.WAITING_FOR_USER_RESPONSE:
- return ;
+ return ;
case AutofixStatus.PROCESSING:
return ;
case AutofixStatus.CANCELLED:
@@ -338,12 +335,12 @@ function AutofixMessageBox({
const contentRef = useRef(null);
const [rootCauseMode, setRootCauseMode] = useState<
- 'suggested_root_cause' | 'custom_root_cause'
- >('suggested_root_cause');
+ 'suggested_root_cause' | 'custom_root_cause' | null
+ >(null);
const [changesMode, setChangesMode] = useState<
- 'give_feedback' | 'add_tests' | 'create_prs'
- >('give_feedback');
+ 'give_feedback' | 'add_tests' | 'create_prs' | null
+ >(null);
const changes =
isChangesStep && step?.type === AutofixStepType.CHANGES ? step.changes : [];
@@ -407,6 +404,9 @@ function AutofixMessageBox({
__html: singleLineRenderer(step.title),
}}
/>
+
+
+
{scrollIntoView !== null && (
@@ -425,9 +425,7 @@ function AutofixMessageBox({
)}
-
-
-
+
)}
@@ -438,114 +436,132 @@ function AutofixMessageBox({
/>
{!isDisabled && (
-
- {isRootCauseSelectionStep && (
-
-
-
- {t('Use suggested root cause')}
-
-
- {t('Propose your own root cause')}
-
-
-
- )}
- {isChangesStep && !prsMade && (
-
-
-
- {t('Give feedback')}
-
-
- {t('Add tests')}
-
-
- {t('Approve changes')}
-
-
-
+
+ {isRootCauseSelectionStep ? (
+ setRootCauseMode(value)}
+ onBack={() => setRootCauseMode(null)}
+ >
+ {option => (
+
+ )}
+
+ ) : isChangesStep && !prsMade ? (
+ setChangesMode(value)}
+ onBack={() => setChangesMode(null)}
+ >
+ {option => (
+
+ {option.key === 'give_feedback' && (
+
+ )}
+ {option.key === 'add_tests' && (
+
+ )}
+ {option.key === 'create_prs' && (
+
+
+ Draft {changes.length} pull request
+ {changes.length > 1 ? 's' : ''} for the above changes?
+
+
+
+ )}
+
+ )}
+
+ ) : isChangesStep && prsMade ? (
+
+ {changes.map(
+ change =>
+ change.pull_request?.pr_url && (
+ }
+ href={change.pull_request.pr_url}
+ external
+ >
+ View PR in {change.repo_name}
+
+ )
+ )}
+
+ ) : (
+
)}
-
+
)}
+ {isDisabled && }
-
- {(!isChangesStep || changesMode === 'give_feedback') &&
- !prsMade &&
- !isDisabled && (
-
- )}
- {isChangesStep && changesMode === 'add_tests' && !prsMade && (
-
- )}
- {isChangesStep && changesMode === 'create_prs' && !prsMade && (
-
-
-
- Draft {changes.length} pull request{changes.length > 1 ? 's' : ''} for the
- above changes?
-
-
-
-
- )}
- {isChangesStep && prsMade && (
-
- {changes.map(
- change =>
- change.pull_request?.pr_url && (
- }
- href={change.pull_request.pr_url}
- external
- >
- View PR in {change.repo_name}
-
- )
- )}
-
- )}
-
);
}
+const Placeholder = styled('div')`
+ padding: ${space(1)};
+`;
+
const ViewPRButtons = styled(ScrollCarousel)`
width: 100%;
padding: 0 ${space(1)};
@@ -593,7 +609,6 @@ const StepTitle = styled('div')`
white-space: nowrap;
display: flex;
align-items: center;
- flex-grow: 1;
span {
margin-right: ${space(1)};
@@ -609,6 +624,7 @@ const StepHeaderRightSection = styled('div')`
const StepIconContainer = styled('div')`
display: flex;
align-items: center;
+ margin-right: auto;
`;
const StepHeader = styled('div')`
@@ -618,6 +634,7 @@ const StepHeader = styled('div')`
padding: 0 ${space(1)} ${space(1)} ${space(1)};
font-size: ${p => p.theme.fontSizeMedium};
font-family: ${p => p.theme.text.family};
+ gap: ${space(1)};
`;
const InputArea = styled('div')`
@@ -630,7 +647,6 @@ const StaticMessage = styled('p')`
padding-top: ${space(1)};
padding-left: ${space(1)};
margin-bottom: 0;
- color: ${p => p.theme.subText};
border-top: 1px solid ${p => p.theme.border};
`;
@@ -654,13 +670,8 @@ const ProcessingStatusIndicator = styled(LoadingIndicator)`
}
`;
-const ActionBar = styled('div')`
- padding-bottom: ${space(1)};
- padding-left: ${space(2)};
-`;
-
const InputSection = styled('div')`
- padding: 0 ${space(2)} ${space(2)} ${space(2)};
+ padding: ${space(0.5)} ${space(2)} ${space(2)};
`;
export default AutofixMessageBox;
diff --git a/static/app/components/events/autofix/autofixOutputStream.tsx b/static/app/components/events/autofix/autofixOutputStream.tsx
new file mode 100644
index 00000000000000..4dff30816dd6bb
--- /dev/null
+++ b/static/app/components/events/autofix/autofixOutputStream.tsx
@@ -0,0 +1,137 @@
+import {useEffect, useRef, useState} from 'react';
+import {keyframes} from '@emotion/react';
+import styled from '@emotion/styled';
+import {AnimatePresence, motion} from 'framer-motion';
+
+import {IconArrow} from 'sentry/icons';
+import {space} from 'sentry/styles/space';
+import testableTransition from 'sentry/utils/testableTransition';
+
+interface Props {
+ stream: string;
+}
+
+const shimmer = keyframes`
+ 0% {
+ background-position: -1000px 0;
+ }
+ 100% {
+ background-position: 1000px 0;
+ }
+`;
+
+export function AutofixOutputStream({stream}: Props) {
+ const [displayedText, setDisplayedText] = useState('');
+ const previousText = useRef('');
+ const currentIndexRef = useRef(0);
+
+ useEffect(() => {
+ const newText = stream;
+
+ // Reset animation if the new text is completely different
+ if (!newText.startsWith(displayedText)) {
+ previousText.current = newText;
+ currentIndexRef.current = 0;
+ setDisplayedText('');
+ }
+
+ const interval = window.setInterval(() => {
+ if (currentIndexRef.current < newText.length) {
+ setDisplayedText(newText.slice(0, currentIndexRef.current + 1));
+ currentIndexRef.current++;
+ } else {
+ window.clearInterval(interval);
+ }
+ }, 15);
+
+ return () => {
+ window.clearInterval(interval);
+ };
+ }, [displayedText, stream]);
+
+ return (
+
+
+
+
+ {displayedText}
+
+
+
+ );
+}
+
+const Wrapper = styled(motion.div)`
+ display: flex;
+ flex-direction: column;
+ align-items: center;
+ margin: ${space(1)} ${space(4)};
+ gap: ${space(1)};
+ overflow: hidden;
+`;
+
+const StreamContainer = styled(motion.div)`
+ position: relative;
+ width: 100%;
+ border-radius: ${p => p.theme.borderRadius};
+ background: ${p => p.theme.background};
+ border: 1px dashed ${p => p.theme.border};
+ height: 5rem;
+ overflow: hidden;
+
+ &:before {
+ content: '';
+ position: absolute;
+ inset: 0;
+ background: linear-gradient(
+ 90deg,
+ transparent,
+ ${p => p.theme.active}20,
+ transparent
+ );
+ background-size: 2000px 100%;
+ animation: ${shimmer} 2s infinite linear;
+ pointer-events: none;
+ }
+`;
+
+const StreamContent = styled('div')`
+ margin: 0;
+ padding: ${space(2)};
+ white-space: pre-wrap;
+ word-break: break-word;
+ font-size: ${p => p.theme.fontSizeSmall};
+ color: ${p => p.theme.subText};
+ height: 5rem;
+ overflow-y: auto;
+ display: flex;
+ flex-direction: column-reverse;
+`;
+
+const StyledArrow = styled(IconArrow)`
+ color: ${p => p.theme.subText};
+ opacity: 0.5;
+`;
diff --git a/static/app/components/events/autofix/autofixRootCause.spec.tsx b/static/app/components/events/autofix/autofixRootCause.spec.tsx
index 96c376b83d814f..651a63d5df5d0f 100644
--- a/static/app/components/events/autofix/autofixRootCause.spec.tsx
+++ b/static/app/components/events/autofix/autofixRootCause.spec.tsx
@@ -66,7 +66,7 @@ describe('AutofixRootCause', function () {
/>
);
- expect(screen.queryByRole('link', {name: 'GitHub'})).toBeInTheDocument();
+ expect(screen.getByRole('link', {name: 'GitHub'})).toBeInTheDocument();
expect(screen.queryByRole('link', {name: 'GitHub'})).toHaveAttribute(
'href',
'https://github.com/test_owner/test_repo/blob/main/src/file.py'
diff --git a/static/app/components/events/autofix/autofixRootCause.tsx b/static/app/components/events/autofix/autofixRootCause.tsx
index 2a301cef677d0f..53ff3ca1b014a1 100644
--- a/static/app/components/events/autofix/autofixRootCause.tsx
+++ b/static/app/components/events/autofix/autofixRootCause.tsx
@@ -383,7 +383,7 @@ function AutofixRootCauseDisplay({
rootCauseSelection,
repos,
}: AutofixRootCauseProps) {
- const [selectedId, setSelectedId] = useState(() => causes[0].id);
+ const [selectedId, setSelectedId] = useState(() => causes[0]!.id);
const {isPending, mutate: handleSelectFix} = useSelectCause({groupId, runId});
if (rootCauseSelection) {
@@ -468,10 +468,24 @@ function AutofixRootCauseDisplay({
}
const cardAnimationProps: AnimationProps = {
- exit: {opacity: 0},
- initial: {opacity: 0, y: 20},
- animate: {opacity: 1, y: 0},
- transition: testableTransition({duration: 0.3}),
+ exit: {opacity: 0, height: 0, scale: 0.8, y: -20},
+ initial: {opacity: 0, height: 0, scale: 0.8},
+ animate: {opacity: 1, height: 'auto', scale: 1},
+ transition: testableTransition({
+ duration: 1.0,
+ height: {
+ type: 'spring',
+ bounce: 0.2,
+ },
+ scale: {
+ type: 'spring',
+ bounce: 0.2,
+ },
+ y: {
+ type: 'tween',
+ ease: 'easeOut',
+ },
+ }),
};
export function AutofixRootCause(props: AutofixRootCauseProps) {
@@ -627,7 +641,9 @@ const ContentWrapper = styled(motion.div)<{selected: boolean}>`
}
`;
-const AnimationWrapper = styled(motion.div)``;
+const AnimationWrapper = styled(motion.div)`
+ transform-origin: top center;
+`;
const CustomRootCausePadding = styled('div')`
padding: ${space(2)} ${space(2)} ${space(2)} ${space(2)};
diff --git a/static/app/components/events/autofix/autofixSteps.spec.tsx b/static/app/components/events/autofix/autofixSteps.spec.tsx
index 18b0067e8007a5..df4453de4195ec 100644
--- a/static/app/components/events/autofix/autofixSteps.spec.tsx
+++ b/static/app/components/events/autofix/autofixSteps.spec.tsx
@@ -62,9 +62,7 @@ describe('AutofixSteps', () => {
render( );
expect(screen.getByText('Root cause 1')).toBeInTheDocument();
- expect(
- screen.getByPlaceholderText('(Optional) Provide any instructions for the fix...')
- ).toBeInTheDocument();
+ expect(screen.getByText('Use suggested root cause')).toBeInTheDocument();
});
it('handles root cause selection', async () => {
@@ -76,6 +74,8 @@ describe('AutofixSteps', () => {
render( );
+ await userEvent.click(screen.getByRole('button', {name: 'Use suggested root cause'}));
+
const input = screen.getByPlaceholderText(
'(Optional) Provide any instructions for the fix...'
);
@@ -98,6 +98,7 @@ describe('AutofixSteps', () => {
render( );
+ await userEvent.click(screen.getByRole('button', {name: 'Use suggested root cause'}));
await userEvent.click(screen.getByRole('button', {name: 'Find a Fix'}));
await waitFor(() => {
@@ -107,9 +108,11 @@ describe('AutofixSteps', () => {
});
});
- it('renders AutofixMessageBox with correct props', () => {
+ it('renders AutofixMessageBox with correct props', async () => {
render( );
+ await userEvent.click(screen.getByRole('button', {name: 'Use suggested root cause'}));
+
const messageBox = screen.getByPlaceholderText(
'(Optional) Provide any instructions for the fix...'
);
@@ -150,7 +153,8 @@ describe('AutofixSteps', () => {
it('handles iterating on changes step', async () => {
MockApiClient.addMockResponse({
- url: '/issues/group1/autofix/setup/',
+ url: '/issues/group1/autofix/setup/?check_write_access=true',
+ method: 'GET',
body: {
genAIConsent: {ok: true},
integration: {ok: true},
@@ -194,7 +198,9 @@ describe('AutofixSteps', () => {
render( );
- const input = screen.getByPlaceholderText('Share helpful context or feedback...');
+ await userEvent.click(screen.getByRole('button', {name: 'Iterate'}));
+
+ const input = screen.getByPlaceholderText('Share helpful context or directions...');
await userEvent.type(input, 'Feedback on changes');
await userEvent.click(screen.getByRole('button', {name: 'Send'}));
diff --git a/static/app/components/events/autofix/autofixSteps.tsx b/static/app/components/events/autofix/autofixSteps.tsx
index 37a5879485a631..490b02474edab4 100644
--- a/static/app/components/events/autofix/autofixSteps.tsx
+++ b/static/app/components/events/autofix/autofixSteps.tsx
@@ -7,6 +7,7 @@ import AutofixInsightCards, {
useUpdateInsightCard,
} from 'sentry/components/events/autofix/autofixInsightCards';
import AutofixMessageBox from 'sentry/components/events/autofix/autofixMessageBox';
+import {AutofixOutputStream} from 'sentry/components/events/autofix/autofixOutputStream';
import {
AutofixRootCause,
useSelectCause,
@@ -135,11 +136,11 @@ export function AutofixSteps({data, groupId, runId}: AutofixStepsProps) {
if (!steps) {
return;
}
- const step = steps[steps.length - 1];
+ const step = steps[steps.length - 1]!;
if (step.type !== AutofixStepType.ROOT_CAUSE_ANALYSIS) {
return;
}
- const cause = step.causes[0];
+ const cause = step.causes[0]!;
const id = cause.id;
handleSelectFix({causeId: id, instruction: text});
}
@@ -163,8 +164,8 @@ export function AutofixSteps({data, groupId, runId}: AutofixStepsProps) {
useEffect(() => {
const observer = new IntersectionObserver(
([entry]) => {
- setIsBottomVisible(entry.isIntersecting);
- if (entry.isIntersecting) {
+ setIsBottomVisible(entry!.isIntersecting);
+ if (entry!.isIntersecting) {
setHasSeenBottom(true);
}
},
@@ -193,7 +194,7 @@ export function AutofixSteps({data, groupId, runId}: AutofixStepsProps) {
const hasNewSteps =
currentStepsLength > prevStepsLengthRef.current &&
- steps[currentStepsLength - 1].type !== AutofixStepType.DEFAULT;
+ steps[currentStepsLength - 1]!.type !== AutofixStepType.DEFAULT;
const hasNewInsights = currentInsightsCount > prevInsightsCountRef.current;
if (hasNewSteps || hasNewInsights) {
@@ -216,16 +217,18 @@ export function AutofixSteps({data, groupId, runId}: AutofixStepsProps) {
}
const lastStep = steps[steps.length - 1];
- const logs: AutofixProgressItem[] = lastStep.progress?.filter(isProgressLog) ?? [];
+ const logs: AutofixProgressItem[] = lastStep!.progress?.filter(isProgressLog) ?? [];
const activeLog =
- lastStep.completedMessage ?? replaceHeadersWithBold(logs.at(-1)?.message ?? '') ?? '';
+ lastStep!.completedMessage ??
+ replaceHeadersWithBold(logs.at(-1)?.message ?? '') ??
+ '';
const isRootCauseSelectionStep =
- lastStep.type === AutofixStepType.ROOT_CAUSE_ANALYSIS &&
- lastStep.status === 'COMPLETED';
+ lastStep!.type === AutofixStepType.ROOT_CAUSE_ANALYSIS &&
+ lastStep!.status === 'COMPLETED';
const isChangesStep =
- lastStep.type === AutofixStepType.CHANGES && lastStep.status === 'COMPLETED';
+ lastStep!.type === AutofixStepType.CHANGES && lastStep!.status === 'COMPLETED';
return (
@@ -280,12 +283,15 @@ export function AutofixSteps({data, groupId, runId}: AutofixStepsProps) {
);
})}
+ {lastStep!.output_stream && (
+
+ )}
[
`/issues/${groupId}/autofix/`,
diff --git a/static/app/components/events/breadcrumbs/breadcrumbsDataSection.spec.tsx b/static/app/components/events/breadcrumbs/breadcrumbsDataSection.spec.tsx
index 13a3db275b0791..8f42fe8f0908f9 100644
--- a/static/app/components/events/breadcrumbs/breadcrumbsDataSection.spec.tsx
+++ b/static/app/components/events/breadcrumbs/breadcrumbsDataSection.spec.tsx
@@ -36,17 +36,17 @@ describe('BreadcrumbsDataSection', function () {
// Only summary crumbs should be visible by default
const summaryCrumbTitles = [
'Exception',
- MOCK_BREADCRUMBS[5].category,
- MOCK_BREADCRUMBS[4].category,
- MOCK_BREADCRUMBS[3].category,
- MOCK_BREADCRUMBS[2].category,
+ MOCK_BREADCRUMBS[5]!.category,
+ MOCK_BREADCRUMBS[4]!.category,
+ MOCK_BREADCRUMBS[3]!.category,
+ MOCK_BREADCRUMBS[2]!.category,
];
for (const crumbTitle of summaryCrumbTitles) {
expect(screen.getByText(crumbTitle)).toBeInTheDocument();
}
const hiddenCrumbTitles = [
- MOCK_BREADCRUMBS[1].category,
- MOCK_BREADCRUMBS[0].category,
+ MOCK_BREADCRUMBS[1]!.category,
+ MOCK_BREADCRUMBS[0]!.category,
];
for (const crumbTitle of hiddenCrumbTitles) {
expect(screen.queryByText(crumbTitle)).not.toBeInTheDocument();
diff --git a/static/app/components/events/breadcrumbs/breadcrumbsDrawer.spec.tsx b/static/app/components/events/breadcrumbs/breadcrumbsDrawer.spec.tsx
index c152ff6c839fd8..25eb4c9d4eb50c 100644
--- a/static/app/components/events/breadcrumbs/breadcrumbsDrawer.spec.tsx
+++ b/static/app/components/events/breadcrumbs/breadcrumbsDrawer.spec.tsx
@@ -23,42 +23,50 @@ async function renderBreadcrumbDrawer() {
}));
render( );
await userEvent.click(screen.getByRole('button', {name: 'View All Breadcrumbs'}));
- return within(screen.getByRole('complementary', {name: 'breadcrumb drawer'}));
+ return screen.getByRole('complementary', {name: 'breadcrumb drawer'});
}
describe('BreadcrumbsDrawer', function () {
it('renders the drawer as expected', async function () {
const drawerScreen = await renderBreadcrumbDrawer();
- expect(drawerScreen.getByRole('button', {name: 'Close Drawer'})).toBeInTheDocument();
+ expect(
+ within(drawerScreen).getByRole('button', {name: 'Close Drawer'})
+ ).toBeInTheDocument();
// Inner drawer breadcrumbs
const {event, group} = MOCK_DATA_SECTION_PROPS;
- expect(drawerScreen.getByText(group.shortId)).toBeInTheDocument();
- expect(drawerScreen.getByText(event.id.slice(0, 8))).toBeInTheDocument();
- expect(drawerScreen.getByText('Breadcrumbs', {selector: 'span'})).toBeInTheDocument();
+ expect(within(drawerScreen).getByText(group.shortId)).toBeInTheDocument();
+ expect(within(drawerScreen).getByText(event.id.slice(0, 8))).toBeInTheDocument();
+ expect(
+ within(drawerScreen).getByText('Breadcrumbs', {selector: 'span'})
+ ).toBeInTheDocument();
// Header & Controls
- expect(drawerScreen.getByText('Breadcrumbs', {selector: 'h3'})).toBeInTheDocument();
expect(
- drawerScreen.getByRole('textbox', {name: 'Search All Breadcrumbs'})
+ within(drawerScreen).getByText('Breadcrumbs', {selector: 'h3'})
).toBeInTheDocument();
expect(
- drawerScreen.getByRole('button', {name: 'Sort All Breadcrumbs'})
+ within(drawerScreen).getByRole('textbox', {name: 'Search All Breadcrumbs'})
).toBeInTheDocument();
expect(
- drawerScreen.getByRole('button', {name: 'Filter All Breadcrumbs'})
+ within(drawerScreen).getByRole('button', {name: 'Sort All Breadcrumbs'})
).toBeInTheDocument();
expect(
- drawerScreen.getByRole('button', {name: 'Change Time Format for All Breadcrumbs'})
+ within(drawerScreen).getByRole('button', {name: 'Filter All Breadcrumbs'})
+ ).toBeInTheDocument();
+ expect(
+ within(drawerScreen).getByRole('button', {
+ name: 'Change Time Format for All Breadcrumbs',
+ })
).toBeInTheDocument();
// Contents
for (const {category, level, message} of MOCK_BREADCRUMBS) {
- expect(drawerScreen.getByText(category)).toBeInTheDocument();
- expect(drawerScreen.getByText(level)).toBeInTheDocument();
- expect(drawerScreen.getByText(message)).toBeInTheDocument();
+ expect(within(drawerScreen).getByText(category)).toBeInTheDocument();
+ expect(within(drawerScreen).getByText(level)).toBeInTheDocument();
+ expect(within(drawerScreen).getByText(message)).toBeInTheDocument();
}
- expect(drawerScreen.getAllByText('06:00:48.760 PM')).toHaveLength(
+ expect(within(drawerScreen).getAllByText('06:00:48.760 PM')).toHaveLength(
MOCK_BREADCRUMBS.length
);
});
@@ -67,16 +75,16 @@ describe('BreadcrumbsDrawer', function () {
const drawerScreen = await renderBreadcrumbDrawer();
const [warningCrumb, logCrumb] = MOCK_BREADCRUMBS;
- expect(drawerScreen.getByText(warningCrumb.category)).toBeInTheDocument();
- expect(drawerScreen.getByText(logCrumb.category)).toBeInTheDocument();
+ expect(within(drawerScreen).getByText(warningCrumb.category)).toBeInTheDocument();
+ expect(within(drawerScreen).getByText(logCrumb.category)).toBeInTheDocument();
- const searchInput = drawerScreen.getByRole('textbox', {
+ const searchInput = within(drawerScreen).getByRole('textbox', {
name: 'Search All Breadcrumbs',
});
await userEvent.type(searchInput, warningCrumb.message);
- expect(drawerScreen.getByText(warningCrumb.category)).toBeInTheDocument();
- expect(drawerScreen.queryByText(logCrumb.category)).not.toBeInTheDocument();
+ expect(within(drawerScreen).getByText(warningCrumb.category)).toBeInTheDocument();
+ expect(within(drawerScreen).queryByText(logCrumb.category)).not.toBeInTheDocument();
});
it('allows type filter to affect displayed crumbs', async function () {
@@ -84,16 +92,18 @@ describe('BreadcrumbsDrawer', function () {
const queryCrumb = MOCK_BREADCRUMBS[3];
const requestCrumb = MOCK_BREADCRUMBS[2];
- expect(drawerScreen.getByText(queryCrumb.category)).toBeInTheDocument();
- expect(drawerScreen.getByText(requestCrumb.category)).toBeInTheDocument();
+ expect(within(drawerScreen).getByText(queryCrumb.category)).toBeInTheDocument();
+ expect(within(drawerScreen).getByText(requestCrumb.category)).toBeInTheDocument();
await userEvent.click(
- drawerScreen.getByRole('button', {name: 'Filter All Breadcrumbs'})
+ within(drawerScreen).getByRole('button', {name: 'Filter All Breadcrumbs'})
);
- await userEvent.click(drawerScreen.getByRole('option', {name: 'Query'}));
+ await userEvent.click(within(drawerScreen).getByRole('option', {name: 'Query'}));
- expect(drawerScreen.getByText(queryCrumb.category)).toBeInTheDocument();
- expect(drawerScreen.queryByText(requestCrumb.category)).not.toBeInTheDocument();
+ expect(within(drawerScreen).getByText(queryCrumb.category)).toBeInTheDocument();
+ expect(
+ within(drawerScreen).queryByText(requestCrumb.category)
+ ).not.toBeInTheDocument();
});
it('allows level spofilter to affect displayed crumbs', async function () {
@@ -101,16 +111,16 @@ describe('BreadcrumbsDrawer', function () {
const [warningCrumb, logCrumb] = MOCK_BREADCRUMBS;
- expect(drawerScreen.getByText(warningCrumb.category)).toBeInTheDocument();
- expect(drawerScreen.getByText(logCrumb.category)).toBeInTheDocument();
+ expect(within(drawerScreen).getByText(warningCrumb.category)).toBeInTheDocument();
+ expect(within(drawerScreen).getByText(logCrumb.category)).toBeInTheDocument();
await userEvent.click(
- drawerScreen.getByRole('button', {name: 'Filter All Breadcrumbs'})
+ within(drawerScreen).getByRole('button', {name: 'Filter All Breadcrumbs'})
);
- await userEvent.click(drawerScreen.getByRole('option', {name: 'warning'}));
+ await userEvent.click(within(drawerScreen).getByRole('option', {name: 'warning'}));
- expect(drawerScreen.getByText(warningCrumb.category)).toBeInTheDocument();
- expect(drawerScreen.queryByText(logCrumb.category)).not.toBeInTheDocument();
+ expect(within(drawerScreen).getByText(warningCrumb.category)).toBeInTheDocument();
+ expect(within(drawerScreen).queryByText(logCrumb.category)).not.toBeInTheDocument();
});
it('allows sort dropdown to affect displayed crumbs', async function () {
@@ -119,54 +129,56 @@ describe('BreadcrumbsDrawer', function () {
const [warningCrumb, logCrumb] = MOCK_BREADCRUMBS;
expect(
- drawerScreen
+ within(drawerScreen)
.getByText(warningCrumb.category)
- .compareDocumentPosition(drawerScreen.getByText(logCrumb.category))
+ .compareDocumentPosition(within(drawerScreen).getByText(logCrumb.category))
).toBe(document.DOCUMENT_POSITION_PRECEDING);
- const sortControl = drawerScreen.getByRole('button', {
+ const sortControl = within(drawerScreen).getByRole('button', {
name: 'Sort All Breadcrumbs',
});
await userEvent.click(sortControl);
- await userEvent.click(drawerScreen.getByRole('option', {name: 'Oldest'}));
+ await userEvent.click(within(drawerScreen).getByRole('option', {name: 'Oldest'}));
expect(
- drawerScreen
+ within(drawerScreen)
.getByText(warningCrumb.category)
- .compareDocumentPosition(drawerScreen.getByText(logCrumb.category))
+ .compareDocumentPosition(within(drawerScreen).getByText(logCrumb.category))
).toBe(document.DOCUMENT_POSITION_FOLLOWING);
await userEvent.click(sortControl);
- await userEvent.click(drawerScreen.getByRole('option', {name: 'Newest'}));
+ await userEvent.click(within(drawerScreen).getByRole('option', {name: 'Newest'}));
expect(
- drawerScreen
+ within(drawerScreen)
.getByText(warningCrumb.category)
- .compareDocumentPosition(drawerScreen.getByText(logCrumb.category))
+ .compareDocumentPosition(within(drawerScreen).getByText(logCrumb.category))
).toBe(document.DOCUMENT_POSITION_PRECEDING);
});
it('allows time display dropdown to change all displayed crumbs', async function () {
const drawerScreen = await renderBreadcrumbDrawer();
- expect(drawerScreen.getAllByText('06:00:48.760 PM')).toHaveLength(
+ expect(within(drawerScreen).getAllByText('06:00:48.760 PM')).toHaveLength(
MOCK_BREADCRUMBS.length
);
- expect(drawerScreen.queryByText('-1min 2ms')).not.toBeInTheDocument();
- const timeControl = drawerScreen.getByRole('button', {
+ expect(within(drawerScreen).queryByText('-1min 2ms')).not.toBeInTheDocument();
+ const timeControl = within(drawerScreen).getByRole('button', {
name: 'Change Time Format for All Breadcrumbs',
});
await userEvent.click(timeControl);
- await userEvent.click(drawerScreen.getByRole('option', {name: 'Relative'}));
+ await userEvent.click(within(drawerScreen).getByRole('option', {name: 'Relative'}));
- expect(drawerScreen.queryByText('06:00:48.760 PM')).not.toBeInTheDocument();
- expect(drawerScreen.getAllByText('-1min 2ms')).toHaveLength(MOCK_BREADCRUMBS.length);
+ expect(within(drawerScreen).queryByText('06:00:48.760 PM')).not.toBeInTheDocument();
+ expect(within(drawerScreen).getAllByText('-1min 2ms')).toHaveLength(
+ MOCK_BREADCRUMBS.length
+ );
await userEvent.click(timeControl);
- await userEvent.click(drawerScreen.getByRole('option', {name: 'Absolute'}));
+ await userEvent.click(within(drawerScreen).getByRole('option', {name: 'Absolute'}));
- expect(drawerScreen.getAllByText('06:00:48.760 PM')).toHaveLength(
+ expect(within(drawerScreen).getAllByText('06:00:48.760 PM')).toHaveLength(
MOCK_BREADCRUMBS.length
);
- expect(drawerScreen.queryByText('-1min 2ms')).not.toBeInTheDocument();
+ expect(within(drawerScreen).queryByText('-1min 2ms')).not.toBeInTheDocument();
});
});
diff --git a/static/app/components/events/breadcrumbs/breadcrumbsTimeline.tsx b/static/app/components/events/breadcrumbs/breadcrumbsTimeline.tsx
index 54e046ade6d4ee..eace7b853cae3d 100644
--- a/static/app/components/events/breadcrumbs/breadcrumbsTimeline.tsx
+++ b/static/app/components/events/breadcrumbs/breadcrumbsTimeline.tsx
@@ -70,7 +70,7 @@ export default function BreadcrumbsTimeline({
const virtualItems = virtualizer.getVirtualItems();
const items = virtualItems.map(virtualizedRow => {
const {breadcrumb, raw, title, meta, iconComponent, colorConfig, levelComponent} =
- breadcrumbs[virtualizedRow.index];
+ breadcrumbs[virtualizedRow.index]!;
const isVirtualCrumb = !defined(raw);
const timeDate = new Date(breadcrumb.timestamp ?? '');
diff --git a/static/app/components/events/breadcrumbs/testUtils.tsx b/static/app/components/events/breadcrumbs/testUtils.tsx
index 677cebb470a448..6a5ea4a70fc3ed 100644
--- a/static/app/components/events/breadcrumbs/testUtils.tsx
+++ b/static/app/components/events/breadcrumbs/testUtils.tsx
@@ -49,7 +49,7 @@ export const MOCK_BREADCRUMBS = [
type: BreadcrumbType.DEFAULT,
timestamp: oneMinuteBeforeEventFixture,
},
-];
+] as const;
const MOCK_BREADCRUMB_ENTRY = {
type: EntryType.BREADCRUMBS,
data: {
diff --git a/static/app/components/events/breadcrumbs/utils.tsx b/static/app/components/events/breadcrumbs/utils.tsx
index d14345302ef1e6..641b82e68d9ea5 100644
--- a/static/app/components/events/breadcrumbs/utils.tsx
+++ b/static/app/components/events/breadcrumbs/utils.tsx
@@ -143,9 +143,9 @@ export function useBreadcrumbFilters(crumbs: EnhancedCrumb[]) {
options.forEach(optionValue => {
const [indicator, value] = optionValue.split('-');
if (indicator === 'type') {
- typeFilterSet.add(value);
+ typeFilterSet.add(value!);
} else if (indicator === 'level') {
- levelFilterSet.add(value);
+ levelFilterSet.add(value!);
}
});
diff --git a/static/app/components/events/contexts/contextCard.spec.tsx b/static/app/components/events/contexts/contextCard.spec.tsx
index fa6e0fd93280c6..c716b4652f0e19 100644
--- a/static/app/components/events/contexts/contextCard.spec.tsx
+++ b/static/app/components/events/contexts/contextCard.spec.tsx
@@ -76,7 +76,7 @@ describe('ContextCard', function () {
project={project}
/>
);
- expect(iconSpy.mock.calls[0][0]).toBe(browserContext.name);
+ expect(iconSpy.mock.calls[0]![0]).toBe(browserContext.name);
expect(screen.getByRole('img')).toBeInTheDocument();
iconSpy.mockReset();
diff --git a/static/app/components/events/contexts/index.tsx b/static/app/components/events/contexts/index.tsx
index 0f3cafedcfe7e7..1e4e2c5e3a40bd 100644
--- a/static/app/components/events/contexts/index.tsx
+++ b/static/app/components/events/contexts/index.tsx
@@ -62,7 +62,7 @@ export function getOrderedContextItems(event: Event): ContextItem[] {
return !isInvalid;
})
.map(([alias, ctx]) => ({
- alias: alias,
+ alias,
type: overrideTypesWithAliases.has(alias) ? alias : ctx?.type,
value: ctx,
}));
diff --git a/static/app/components/events/contexts/knownContext/browser.spec.tsx b/static/app/components/events/contexts/knownContext/browser.spec.tsx
index d42e6c3b02a949..7018b6581b0ba7 100644
--- a/static/app/components/events/contexts/knownContext/browser.spec.tsx
+++ b/static/app/components/events/contexts/knownContext/browser.spec.tsx
@@ -8,7 +8,7 @@ import {
getBrowserContextData,
} from 'sentry/components/events/contexts/knownContext/browser';
-export const MOCK_BROWSER_CONTEXT: BrowserContext = {
+const MOCK_BROWSER_CONTEXT: BrowserContext = {
version: '83.0.4103',
type: 'browser',
name: '',
@@ -17,7 +17,7 @@ export const MOCK_BROWSER_CONTEXT: BrowserContext = {
unknown_key: 123,
};
-export const MOCK_REDACTION = {
+const MOCK_REDACTION = {
name: {
'': {
chunks: [
diff --git a/static/app/components/events/contexts/platformContext/unity.spec.tsx b/static/app/components/events/contexts/platformContext/unity.spec.tsx
index 5ef900a88519ee..8d920d535ba252 100644
--- a/static/app/components/events/contexts/platformContext/unity.spec.tsx
+++ b/static/app/components/events/contexts/platformContext/unity.spec.tsx
@@ -5,7 +5,7 @@ import {render, screen} from 'sentry-test/reactTestingLibrary';
import ContextCard from 'sentry/components/events/contexts/contextCard';
import {getUnityContextData} from 'sentry/components/events/contexts/platformContext/unity';
-export const MOCK_UNITY_CONTEXT = {
+const MOCK_UNITY_CONTEXT = {
type: 'unity' as const,
copy_texture_support: 'Basic, Copy3D, DifferentTypes, TextureToRT, RTToTexture',
editor_version: '2022.1.23f1',
@@ -17,7 +17,7 @@ export const MOCK_UNITY_CONTEXT = {
unknown_key: 123,
};
-export const MOCK_REDACTION = {
+const MOCK_REDACTION = {
install_mode: {
'': {
rem: [['organization:0', 'x']],
diff --git a/static/app/components/events/contexts/utils.spec.tsx b/static/app/components/events/contexts/utils.spec.tsx
index 968d5ae8eb6a74..a5a93cb9222225 100644
--- a/static/app/components/events/contexts/utils.spec.tsx
+++ b/static/app/components/events/contexts/utils.spec.tsx
@@ -154,10 +154,10 @@ describe('contexts utils', function () {
};
const knownStructuredData = getKnownStructuredData(knownData, errMeta);
- expect(knownData[0].key).toEqual(knownStructuredData[0].key);
- expect(knownData[0].subject).toEqual(knownStructuredData[0].subject);
- render({knownStructuredData[0].value as React.ReactNode} );
- expect(screen.getByText(`${knownData[0].value}`)).toBeInTheDocument();
+ expect(knownData[0]!.key).toEqual(knownStructuredData[0]!.key);
+ expect(knownData[0]!.subject).toEqual(knownStructuredData[0]!.subject);
+ render({knownStructuredData[0]!.value as React.ReactNode} );
+ expect(screen.getByText(`${knownData[0]!.value}`)).toBeInTheDocument();
expect(screen.getByTestId('annotated-text-error-icon')).toBeInTheDocument();
});
});
diff --git a/static/app/components/events/contexts/utils.tsx b/static/app/components/events/contexts/utils.tsx
index 8ad210103a7935..8d79c598b8f066 100644
--- a/static/app/components/events/contexts/utils.tsx
+++ b/static/app/components/events/contexts/utils.tsx
@@ -80,21 +80,21 @@ export function generateIconName(
}
const formattedName = name
- .split(/\d/)[0]
+ .split(/\d/)[0]!
.toLowerCase()
.replace(/[^a-z0-9\-]+/g, '-')
.replace(/\-+$/, '')
.replace(/^\-+/, '');
if (formattedName === 'edge' && version) {
- const majorVersion = version.split('.')[0];
+ const majorVersion = version.split('.')[0]!;
const isLegacyEdge = majorVersion >= '12' && majorVersion <= '18';
return isLegacyEdge ? 'legacy-edge' : 'edge';
}
if (formattedName.endsWith('-mobile')) {
- return formattedName.split('-')[0];
+ return formattedName.split('-')[0]!;
}
return formattedName;
diff --git a/static/app/components/events/errorItem.spec.tsx b/static/app/components/events/errorItem.spec.tsx
index c3d2b89bf85bcf..d4a39b7a205bce 100644
--- a/static/app/components/events/errorItem.spec.tsx
+++ b/static/app/components/events/errorItem.spec.tsx
@@ -50,7 +50,7 @@ describe('Issue error item', function () {
expect(screen.getByText('File Path')).toBeInTheDocument();
expect(screen.getAllByText(/redacted/)).toHaveLength(2);
- await userEvent.hover(screen.getAllByText(/redacted/)[0]);
+ await userEvent.hover(screen.getAllByText(/redacted/)[0]!);
expect(
await screen.findByText(
diff --git a/static/app/components/events/eventAttachments.spec.tsx b/static/app/components/events/eventAttachments.spec.tsx
index 1788979fc11c8c..113d9d8d350ced 100644
--- a/static/app/components/events/eventAttachments.spec.tsx
+++ b/static/app/components/events/eventAttachments.spec.tsx
@@ -27,11 +27,11 @@ describe('EventAttachments', function () {
const props = {
group: undefined,
- project: project,
+ project: project!,
event,
};
- const attachmentsUrl = `/projects/${organization.slug}/${project.slug}/events/${event.id}/attachments/`;
+ const attachmentsUrl = `/projects/${organization.slug}/${project!.slug}/events/${event.id}/attachments/`;
beforeEach(() => {
ConfigStore.loadInitialData(ConfigFixture());
@@ -60,7 +60,7 @@ describe('EventAttachments', function () {
expect(screen.getByRole('link', {name: 'configure limit'})).toHaveAttribute(
'href',
- `/settings/org-slug/projects/${project.slug}/security-and-privacy/`
+ `/settings/org-slug/projects/${project!.slug}/security-and-privacy/`
);
expect(
@@ -144,7 +144,7 @@ describe('EventAttachments', function () {
});
MockApiClient.addMockResponse({
- url: `/projects/${organization.slug}/${project.slug}/events/${event.id}/attachments/1/?download`,
+ url: `/projects/${organization.slug}/${project!.slug}/events/${event.id}/attachments/1/?download`,
body: 'file contents',
});
@@ -180,7 +180,7 @@ describe('EventAttachments', function () {
expect(await screen.findByText('Attachments (2)')).toBeInTheDocument();
- await userEvent.click(screen.getAllByRole('button', {name: 'Delete'})[0]);
+ await userEvent.click(screen.getAllByRole('button', {name: 'Delete'})[0]!);
await userEvent.click(
within(screen.getByRole('dialog')).getByRole('button', {name: /delete/i})
);
@@ -188,7 +188,7 @@ describe('EventAttachments', function () {
// Should make the delete request and remove the attachment optimistically
await waitFor(() => {
expect(deleteMock).toHaveBeenCalled();
- expect(screen.queryByTestId('pic_1.png')).not.toBeInTheDocument();
});
+ expect(screen.queryByTestId('pic_1.png')).not.toBeInTheDocument();
});
});
diff --git a/static/app/components/events/eventCustomPerformanceMetrics.spec.tsx b/static/app/components/events/eventCustomPerformanceMetrics.spec.tsx
index c80372b9bfdc62..236be349d998ec 100644
--- a/static/app/components/events/eventCustomPerformanceMetrics.spec.tsx
+++ b/static/app/components/events/eventCustomPerformanceMetrics.spec.tsx
@@ -5,12 +5,8 @@ import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
import EventCustomPerformanceMetrics from 'sentry/components/events/eventCustomPerformanceMetrics';
import type {Event} from 'sentry/types/event';
-import {browserHistory} from 'sentry/utils/browserHistory';
describe('EventCustomPerformanceMetrics', function () {
- beforeEach(function () {
- browserHistory.push = jest.fn();
- });
it('should not render anything', function () {
const {router, organization} = initializeOrg();
render(
diff --git a/static/app/components/events/eventDrawer.tsx b/static/app/components/events/eventDrawer.tsx
index 59dd58926a59ec..fb26d789b2c55f 100644
--- a/static/app/components/events/eventDrawer.tsx
+++ b/static/app/components/events/eventDrawer.tsx
@@ -47,6 +47,9 @@ export const EventDrawerHeader = styled(DrawerHeader)`
max-height: ${MIN_NAV_HEIGHT}px;
box-shadow: none;
border-bottom: 1px solid ${p => p.theme.border};
+ overflow: hidden;
+ text-overflow: ellipsis;
+ max-width: 100%;
`;
export const EventNavigator = styled('div')`
diff --git a/static/app/components/events/eventEntries.tsx b/static/app/components/events/eventEntries.tsx
index 609738ce596d10..bdc4d64addcafa 100644
--- a/static/app/components/events/eventEntries.tsx
+++ b/static/app/components/events/eventEntries.tsx
@@ -8,11 +8,10 @@ import EventReplay from 'sentry/components/events/eventReplay';
import {EventGroupingInfoSection} from 'sentry/components/events/groupingInfo/groupingInfoSection';
import {ActionableItems} from 'sentry/components/events/interfaces/crashContent/exception/actionableItems';
import {actionableItemsEnabled} from 'sentry/components/events/interfaces/crashContent/exception/useActionableItems';
-import {CustomMetricsEventData} from 'sentry/components/metrics/customMetricsEventData';
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import type {Entry, Event} from 'sentry/types/event';
-import {EntryType, EventOrGroupType} from 'sentry/types/event';
+import {EntryType} from 'sentry/types/event';
import type {Group} from 'sentry/types/group';
import type {Organization, SharedViewOrganization} from 'sentry/types/organization';
import type {Project} from 'sentry/types/project';
@@ -119,13 +118,6 @@ function EventEntries({
{!isShare && }
{!isShare && }
- {event.type === EventOrGroupType.TRANSACTION && event._metrics_summary && (
-
- )}
{!isShare && event.groupID && (
{!hideBeforeReplayEntries &&
- beforeReplayEntries.map((entry, entryIdx) => (
+ beforeReplayEntries!.map((entry, entryIdx) => (
))}
{!isShare && }
{!isShare && }
- {afterReplayEntries.map((entry, entryIdx) => {
+ {afterReplayEntries!.map((entry, entryIdx) => {
if (hideBreadCrumbs && entry.type === EntryType.BREADCRUMBS) {
return null;
}
diff --git a/static/app/components/events/eventExtraData/index.spec.tsx b/static/app/components/events/eventExtraData/index.spec.tsx
index 0038ce1445fa49..1dd2c9d9bc429e 100644
--- a/static/app/components/events/eventExtraData/index.spec.tsx
+++ b/static/app/components/events/eventExtraData/index.spec.tsx
@@ -181,7 +181,7 @@ describe('EventExtraData', function () {
await userEvent.click(screen.getByRole('button', {name: 'Expand'}));
expect(await screen.findAllByText(/redacted/)).toHaveLength(10);
- await userEvent.hover(screen.getAllByText(/redacted/)[0]);
+ await userEvent.hover(screen.getAllByText(/redacted/)[0]!);
expect(
await screen.findByText(
diff --git a/static/app/components/events/eventHydrationDiff/replayDiffContent.tsx b/static/app/components/events/eventHydrationDiff/replayDiffContent.tsx
index 3e7e89261d8309..04eebfc1885f00 100644
--- a/static/app/components/events/eventHydrationDiff/replayDiffContent.tsx
+++ b/static/app/components/events/eventHydrationDiff/replayDiffContent.tsx
@@ -7,7 +7,7 @@ import {t} from 'sentry/locale';
import type {Event} from 'sentry/types/event';
import type {Group} from 'sentry/types/group';
import {getReplayDiffOffsetsFromEvent} from 'sentry/utils/replays/getDiffTimestamps';
-import useReplayReader from 'sentry/utils/replays/hooks/useReplayReader';
+import useLoadReplayReader from 'sentry/utils/replays/hooks/useLoadReplayReader';
import {SectionKey} from 'sentry/views/issueDetails/streamline/context';
import {InterimSection} from 'sentry/views/issueDetails/streamline/interimSection';
@@ -19,7 +19,7 @@ interface Props {
}
export default function ReplayDiffContent({event, group, orgSlug, replaySlug}: Props) {
- const replayContext = useReplayReader({
+ const replayContext = useLoadReplayReader({
orgSlug,
replaySlug,
});
diff --git a/static/app/components/events/eventProcessingErrors.tsx b/static/app/components/events/eventProcessingErrors.tsx
index 76aa7b59af88e8..5992862ebbeb92 100644
--- a/static/app/components/events/eventProcessingErrors.tsx
+++ b/static/app/components/events/eventProcessingErrors.tsx
@@ -45,47 +45,43 @@ export default function EventErrorCard({
function EventErrorDescription({error}: {error: ErrorMessage}) {
const {title, data: errorData} = error;
- const cleanedData = useMemo(
- () => {
- const data = errorData || {};
- if (data.message === 'None') {
- // Python ensures a message string, but "None" doesn't make sense here
- delete data.message;
- }
-
- if (typeof data.image_path === 'string') {
- // Separate the image name for readability
- const separator = /^([a-z]:\\|\\\\)/i.test(data.image_path) ? '\\' : '/';
- const path = data.image_path.split(separator);
- data.image_name = path.splice(-1, 1)[0];
- data.image_path = path.length ? path.join(separator) + separator : '';
- }
-
- if (typeof data.server_time === 'string' && typeof data.sdk_time === 'string') {
- data.message = t(
- 'Adjusted timestamps by %s',
- moment
- .duration(moment.utc(data.server_time).diff(moment.utc(data.sdk_time)))
- .humanize()
- );
- }
-
- return Object.entries(data)
- .map(([key, value]) => ({
- key,
- value,
- subject: keyMapping[key] || startCase(key),
- }))
- .filter(d => {
- if (!d.value) {
- return true;
- }
- return !!d.value;
- });
- },
- // eslint-disable-next-line react-hooks/exhaustive-deps
- [errorData]
- );
+ const cleanedData = useMemo(() => {
+ const data = errorData || {};
+ if (data.message === 'None') {
+ // Python ensures a message string, but "None" doesn't make sense here
+ delete data.message;
+ }
+
+ if (typeof data.image_path === 'string') {
+ // Separate the image name for readability
+ const separator = /^([a-z]:\\|\\\\)/i.test(data.image_path) ? '\\' : '/';
+ const path = data.image_path.split(separator);
+ data.image_name = path.splice(-1, 1)[0];
+ data.image_path = path.length ? path.join(separator) + separator : '';
+ }
+
+ if (typeof data.server_time === 'string' && typeof data.sdk_time === 'string') {
+ data.message = t(
+ 'Adjusted timestamps by %s',
+ moment
+ .duration(moment.utc(data.server_time).diff(moment.utc(data.sdk_time)))
+ .humanize()
+ );
+ }
+
+ return Object.entries(data)
+ .map(([key, value]) => ({
+ key,
+ value,
+ subject: keyMapping[key] || startCase(key),
+ }))
+ .filter(d => {
+ if (!d.value) {
+ return true;
+ }
+ return !!d.value;
+ });
+ }, [errorData]);
return ;
}
diff --git a/static/app/components/events/eventReplay/index.spec.tsx b/static/app/components/events/eventReplay/index.spec.tsx
index f6d4b358ddc1d5..e989653ae5cd5a 100644
--- a/static/app/components/events/eventReplay/index.spec.tsx
+++ b/static/app/components/events/eventReplay/index.spec.tsx
@@ -8,17 +8,17 @@ import {ReplayRecordFixture} from 'sentry-fixture/replayRecord';
import {render, screen} from 'sentry-test/reactTestingLibrary';
import EventReplay from 'sentry/components/events/eventReplay';
+import useLoadReplayReader from 'sentry/utils/replays/hooks/useLoadReplayReader';
import {
useHaveSelectedProjectsSentAnyReplayEvents,
useReplayOnboardingSidebarPanel,
} from 'sentry/utils/replays/hooks/useReplayOnboarding';
-import useReplayReader from 'sentry/utils/replays/hooks/useReplayReader';
import ReplayReader from 'sentry/utils/replays/replayReader';
import useProjects from 'sentry/utils/useProjects';
import type {ReplayError} from 'sentry/views/replays/types';
jest.mock('sentry/utils/replays/hooks/useReplayOnboarding');
-jest.mock('sentry/utils/replays/hooks/useReplayReader');
+jest.mock('sentry/utils/replays/hooks/useLoadReplayReader');
jest.mock('sentry/utils/useProjects');
jest.mock('sentry/utils/replays/hooks/useReplayOnboarding');
// Replay clip preview is very heavy, mock it out
@@ -70,7 +70,7 @@ const mockReplay = ReplayReader.factory({
}),
});
-jest.mocked(useReplayReader).mockImplementation(() => {
+jest.mocked(useLoadReplayReader).mockImplementation(() => {
return {
attachments: [],
errors: mockErrors,
diff --git a/static/app/components/events/eventReplay/index.tsx b/static/app/components/events/eventReplay/index.tsx
index 383f6622815d7a..7910b45169568e 100644
--- a/static/app/components/events/eventReplay/index.tsx
+++ b/static/app/components/events/eventReplay/index.tsx
@@ -1,4 +1,4 @@
-import {lazy, useEffect} from 'react';
+import {lazy} from 'react';
import ErrorBoundary from 'sentry/components/errorBoundary';
import {ReplayClipSection} from 'sentry/components/events/eventReplay/replayClipSection';
@@ -8,7 +8,6 @@ import type {Group} from 'sentry/types/group';
import useEventCanShowReplayUpsell from 'sentry/utils/event/useEventCanShowReplayUpsell';
import {getReplayIdFromEvent} from 'sentry/utils/replays/getReplayIdFromEvent';
import {useHaveSelectedProjectsSentAnyReplayEvents} from 'sentry/utils/replays/hooks/useReplayOnboarding';
-import useUrlParams from 'sentry/utils/useUrlParams';
import {useIsSampleEvent} from 'sentry/views/issueDetails/utils';
interface Props {
@@ -29,14 +28,6 @@ export default function EventReplay({event, group, projectSlug}: Props) {
});
const isSampleError = useIsSampleEvent();
- const {setParamValue: setProjectId} = useUrlParams('project');
-
- useEffect(() => {
- if (canShowUpsell) {
- setProjectId(upsellProjectId);
- }
- }, [upsellProjectId, setProjectId, canShowUpsell]);
-
if (replayId) {
return ;
}
diff --git a/static/app/components/events/eventReplay/replayClipPreview.spec.tsx b/static/app/components/events/eventReplay/replayClipPreview.spec.tsx
index defb415050dbf1..769c060962e726 100644
--- a/static/app/components/events/eventReplay/replayClipPreview.spec.tsx
+++ b/static/app/components/events/eventReplay/replayClipPreview.spec.tsx
@@ -7,15 +7,15 @@ import {initializeOrg} from 'sentry-test/initializeOrg';
import {render as baseRender, screen, userEvent} from 'sentry-test/reactTestingLibrary';
import type {Organization} from 'sentry/types/organization';
-import useReplayReader from 'sentry/utils/replays/hooks/useReplayReader';
+import useLoadReplayReader from 'sentry/utils/replays/hooks/useLoadReplayReader';
import ReplayReader from 'sentry/utils/replays/replayReader';
import type RequestError from 'sentry/utils/requestError/requestError';
import ReplayClipPreview from './replayClipPreview';
-jest.mock('sentry/utils/replays/hooks/useReplayReader');
+jest.mock('sentry/utils/replays/hooks/useLoadReplayReader');
-const mockUseReplayReader = jest.mocked(useReplayReader);
+const mockUseLoadReplayReader = jest.mocked(useLoadReplayReader);
const mockOrgSlug = 'sentry-emerging-tech';
const mockReplaySlug = 'replays:761104e184c64d439ee1014b72b4d83b';
@@ -48,7 +48,7 @@ const mockReplay = ReplayReader.factory({
},
});
-mockUseReplayReader.mockImplementation(() => {
+mockUseLoadReplayReader.mockImplementation(() => {
return {
attachments: [],
errors: [],
@@ -120,7 +120,7 @@ describe('ReplayClipPreview', () => {
it('Should render a placeholder when is fetching the replay data', () => {
// Change the mocked hook to return a loading state
- mockUseReplayReader.mockImplementationOnce(() => {
+ mockUseLoadReplayReader.mockImplementationOnce(() => {
return {
attachments: [],
errors: [],
@@ -141,7 +141,7 @@ describe('ReplayClipPreview', () => {
it('Should throw error when there is a fetch error', () => {
// Change the mocked hook to return a fetch error
- mockUseReplayReader.mockImplementationOnce(() => {
+ mockUseLoadReplayReader.mockImplementationOnce(() => {
return {
attachments: [],
errors: [],
diff --git a/static/app/components/events/eventReplay/replayClipPreview.tsx b/static/app/components/events/eventReplay/replayClipPreview.tsx
index 54d2c3299800f6..8d4c011fafaffa 100644
--- a/static/app/components/events/eventReplay/replayClipPreview.tsx
+++ b/static/app/components/events/eventReplay/replayClipPreview.tsx
@@ -2,7 +2,7 @@ import {useMemo} from 'react';
import ReplayClipPreviewPlayer from 'sentry/components/events/eventReplay/replayClipPreviewPlayer';
import {Provider as ReplayContextProvider} from 'sentry/components/replays/replayContext';
-import useReplayReader from 'sentry/utils/replays/hooks/useReplayReader';
+import useLoadReplayReader from 'sentry/utils/replays/hooks/useLoadReplayReader';
interface ReplayClipPreviewProps
extends Omit<
@@ -33,7 +33,7 @@ function ReplayClipPreview({
[clipOffsets.durationBeforeMs, clipOffsets.durationAfterMs, eventTimestampMs]
);
- const replayReaderResult = useReplayReader({
+ const replayReaderResult = useLoadReplayReader({
orgSlug,
replaySlug,
clipWindow,
diff --git a/static/app/components/events/eventReplay/replayClipPreviewPlayer.tsx b/static/app/components/events/eventReplay/replayClipPreviewPlayer.tsx
index 6094ae9c35957d..fe4cccb6050f13 100644
--- a/static/app/components/events/eventReplay/replayClipPreviewPlayer.tsx
+++ b/static/app/components/events/eventReplay/replayClipPreviewPlayer.tsx
@@ -18,7 +18,7 @@ import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {trackAnalytics} from 'sentry/utils/analytics';
import type {TabKey} from 'sentry/utils/replays/hooks/useActiveReplayTab';
-import type useReplayReader from 'sentry/utils/replays/hooks/useReplayReader';
+import type useLoadReplayReader from 'sentry/utils/replays/hooks/useLoadReplayReader';
import type RequestError from 'sentry/utils/requestError/requestError';
import useRouteAnalyticsParams from 'sentry/utils/routeAnalytics/useRouteAnalyticsParams';
import useOrganization from 'sentry/utils/useOrganization';
@@ -28,7 +28,7 @@ import type {ReplayRecord} from 'sentry/views/replays/types';
interface ReplayClipPreviewPlayerProps {
analyticsContext: string;
orgSlug: string;
- replayReaderResult: ReturnType;
+ replayReaderResult: ReturnType;
focusTab?: TabKey;
fullReplayButtonProps?: Partial>;
handleBackClick?: () => void;
diff --git a/static/app/components/events/eventReplay/replayInlineOnboardingPanel.tsx b/static/app/components/events/eventReplay/replayInlineOnboardingPanel.tsx
index f8f1277bab45e3..93d6ff2714dc9b 100644
--- a/static/app/components/events/eventReplay/replayInlineOnboardingPanel.tsx
+++ b/static/app/components/events/eventReplay/replayInlineOnboardingPanel.tsx
@@ -60,10 +60,11 @@ export default function ReplayInlineOnboardingPanel({
activateSidebar(projectId)}
>
{t('Set Up Now')}
diff --git a/static/app/components/events/eventReplay/replayPreview.spec.tsx b/static/app/components/events/eventReplay/replayPreview.spec.tsx
index 3197984e1413dc..fc57d720f8286a 100644
--- a/static/app/components/events/eventReplay/replayPreview.spec.tsx
+++ b/static/app/components/events/eventReplay/replayPreview.spec.tsx
@@ -6,15 +6,15 @@ import {ReplayRecordFixture} from 'sentry-fixture/replayRecord';
import {initializeOrg} from 'sentry-test/initializeOrg';
import {render as baseRender, screen} from 'sentry-test/reactTestingLibrary';
-import useReplayReader from 'sentry/utils/replays/hooks/useReplayReader';
+import useLoadReplayReader from 'sentry/utils/replays/hooks/useLoadReplayReader';
import ReplayReader from 'sentry/utils/replays/replayReader';
import type RequestError from 'sentry/utils/requestError/requestError';
import ReplayPreview from './replayPreview';
-jest.mock('sentry/utils/replays/hooks/useReplayReader');
+jest.mock('sentry/utils/replays/hooks/useLoadReplayReader');
-const mockUseReplayReader = jest.mocked(useReplayReader);
+const mockUseLoadReplayReader = jest.mocked(useLoadReplayReader);
const mockOrgSlug = 'sentry-emerging-tech';
const mockReplaySlug = 'replays:761104e184c64d439ee1014b72b4d83b';
@@ -39,7 +39,7 @@ const mockReplay = ReplayReader.factory({
}),
});
-mockUseReplayReader.mockImplementation(() => {
+mockUseLoadReplayReader.mockImplementation(() => {
return {
attachments: [],
errors: [],
@@ -84,7 +84,7 @@ const defaultProps = {
describe('ReplayPreview', () => {
it('Should render a placeholder when is fetching the replay data', () => {
// Change the mocked hook to return a loading state
- mockUseReplayReader.mockImplementationOnce(() => {
+ mockUseLoadReplayReader.mockImplementationOnce(() => {
return {
attachments: [],
errors: [],
@@ -105,7 +105,7 @@ describe('ReplayPreview', () => {
it('Should throw error when there is a fetch error', () => {
// Change the mocked hook to return a fetch error
- mockUseReplayReader.mockImplementationOnce(() => {
+ mockUseLoadReplayReader.mockImplementationOnce(() => {
return {
attachments: [],
errors: [],
diff --git a/static/app/components/events/eventReplay/replayPreview.tsx b/static/app/components/events/eventReplay/replayPreview.tsx
index e531a79e32e631..8a5575a62871c0 100644
--- a/static/app/components/events/eventReplay/replayPreview.tsx
+++ b/static/app/components/events/eventReplay/replayPreview.tsx
@@ -15,7 +15,7 @@ import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {trackAnalytics} from 'sentry/utils/analytics';
import type {TabKey} from 'sentry/utils/replays/hooks/useActiveReplayTab';
-import useReplayReader from 'sentry/utils/replays/hooks/useReplayReader';
+import useLoadReplayReader from 'sentry/utils/replays/hooks/useLoadReplayReader';
import type RequestError from 'sentry/utils/requestError/requestError';
import useRouteAnalyticsParams from 'sentry/utils/routeAnalytics/useRouteAnalyticsParams';
import useOrganization from 'sentry/utils/useOrganization';
@@ -60,7 +60,7 @@ function ReplayPreview({
orgSlug,
replaySlug,
}: Props) {
- const {fetching, replay, replayRecord, fetchError, replayId} = useReplayReader({
+ const {fetching, replay, replayRecord, fetchError, replayId} = useLoadReplayReader({
orgSlug,
replaySlug,
});
diff --git a/static/app/components/events/eventStatisticalDetector/aggregateSpanDiff.tsx b/static/app/components/events/eventStatisticalDetector/aggregateSpanDiff.tsx
index 90d06bf3cc21d9..e1013e44ebdc8b 100644
--- a/static/app/components/events/eventStatisticalDetector/aggregateSpanDiff.tsx
+++ b/static/app/components/events/eventStatisticalDetector/aggregateSpanDiff.tsx
@@ -149,8 +149,8 @@ function AggregateSpanDiff({event, project}: AggregateSpanDiffProps) {
};
if (causeType === 'throughput') {
- const throughputBefore = row[`epm_by_timestamp(less,${breakpoint})`];
- const throughputAfter = row[`epm_by_timestamp(greater,${breakpoint})`];
+ const throughputBefore = row[`epm_by_timestamp(less,${breakpoint})`]!;
+ const throughputAfter = row[`epm_by_timestamp(greater,${breakpoint})`]!;
return {
...commonProps,
throughputBefore,
@@ -160,9 +160,9 @@ function AggregateSpanDiff({event, project}: AggregateSpanDiffProps) {
}
const durationBefore =
- row[`avg_by_timestamp(span.self_time,less,${breakpoint})`] / 1e3;
+ row[`avg_by_timestamp(span.self_time,less,${breakpoint})`]! / 1e3;
const durationAfter =
- row[`avg_by_timestamp(span.self_time,greater,${breakpoint})`] / 1e3;
+ row[`avg_by_timestamp(span.self_time,greater,${breakpoint})`]! / 1e3;
return {
...commonProps,
durationBefore,
diff --git a/static/app/components/events/eventStatisticalDetector/eventComparison/eventDisplay.tsx b/static/app/components/events/eventStatisticalDetector/eventComparison/eventDisplay.tsx
index 37a80abb359873..a4af44ae8ccc73 100644
--- a/static/app/components/events/eventStatisticalDetector/eventComparison/eventDisplay.tsx
+++ b/static/app/components/events/eventStatisticalDetector/eventComparison/eventDisplay.tsx
@@ -163,7 +163,7 @@ function EventDisplay({
useEffect(() => {
if (defined(eventIds) && eventIds.length > 0 && !selectedEventId) {
- setSelectedEventId(eventIds[0]);
+ setSelectedEventId(eventIds[0]!);
}
}, [eventIds, selectedEventId]);
@@ -242,7 +242,7 @@ function EventDisplay({
icon={ }
onPaginate={() => {
if (hasPrev) {
- setSelectedEventId(eventIds[eventIdIndex - 1]);
+ setSelectedEventId(eventIds[eventIdIndex - 1]!);
}
}}
/>
@@ -252,7 +252,7 @@ function EventDisplay({
icon={ }
onPaginate={() => {
if (hasNext) {
- setSelectedEventId(eventIds[eventIdIndex + 1]);
+ setSelectedEventId(eventIds[eventIdIndex + 1]!);
}
}}
/>
diff --git a/static/app/components/events/eventStatisticalDetector/eventThroughput.tsx b/static/app/components/events/eventStatisticalDetector/eventThroughput.tsx
index 40c86132577dc4..438b82d19ab8d0 100644
--- a/static/app/components/events/eventStatisticalDetector/eventThroughput.tsx
+++ b/static/app/components/events/eventStatisticalDetector/eventThroughput.tsx
@@ -100,7 +100,7 @@ function EventThroughputInner({event, group}: EventThroughputProps) {
const result = transformEventStats(
stats.series.map(item => [item.timestamp, [{count: item.value / item.interval}]]),
'throughput()'
- )[0];
+ )[0]!;
result.markLine = {
data: [
@@ -293,7 +293,7 @@ function useThroughputStats({datetime, event, group}: UseThroughputStatsOptions)
if (data.length < 2) {
return null;
}
- return data[1][0] - data[0][0];
+ return data[1]![0] - data[0]![0];
}, [transactionStats?.data]);
const transactionData = useMemo(() => {
@@ -305,7 +305,7 @@ function useThroughputStats({datetime, event, group}: UseThroughputStatsOptions)
const timestamp = curr[0];
const bucket = Math.floor(timestamp / BUCKET_SIZE) * BUCKET_SIZE;
const prev = acc[acc.length - 1];
- const value = curr[1][0].count;
+ const value = curr[1]![0]!.count;
if (prev?.timestamp === bucket) {
prev.value += value;
diff --git a/static/app/components/events/eventStatisticalDetector/spanOpBreakdown.tsx b/static/app/components/events/eventStatisticalDetector/spanOpBreakdown.tsx
index 2b84d78ec6405b..d61515d196f450 100644
--- a/static/app/components/events/eventStatisticalDetector/spanOpBreakdown.tsx
+++ b/static/app/components/events/eventStatisticalDetector/spanOpBreakdown.tsx
@@ -121,13 +121,13 @@ function EventSpanOpBreakdown({event}: {event: Event}) {
const spanOpDiffs: SpanOpDiff[] = SPAN_OPS.map(op => {
const preBreakpointValue =
- (preBreakpointData?.data[0][`p95(spans.${op})`] as string) || undefined;
+ (preBreakpointData?.data[0]![`p95(spans.${op})`] as string) || undefined;
const preBreakpointValueAsNumber = preBreakpointValue
? parseInt(preBreakpointValue, 10)
: 0;
const postBreakpointValue =
- (postBreakpointData?.data[0][`p95(spans.${op})`] as string) || undefined;
+ (postBreakpointData?.data[0]![`p95(spans.${op})`] as string) || undefined;
const postBreakpointValueAsNumber = postBreakpointValue
? parseInt(postBreakpointValue, 10)
: 0;
diff --git a/static/app/components/events/eventTags/eventTagsTree.spec.tsx b/static/app/components/events/eventTags/eventTagsTree.spec.tsx
index 0f417a8c25aa47..90e9c6a3396dc8 100644
--- a/static/app/components/events/eventTags/eventTagsTree.spec.tsx
+++ b/static/app/components/events/eventTags/eventTagsTree.spec.tsx
@@ -255,7 +255,7 @@ describe('EventTagsTree', function () {
expect(dropdown).toBeInTheDocument();
const errorRows = screen.queryAllByTestId('tag-tree-row-errors');
- expect(errorRows.length).toBe(2);
+ expect(errorRows).toHaveLength(2);
});
it('avoids rendering nullish tags', async function () {
diff --git a/static/app/components/events/eventTags/eventTagsTree.tsx b/static/app/components/events/eventTags/eventTagsTree.tsx
index f112a5d40eac2a..d607b2c0fd1679 100644
--- a/static/app/components/events/eventTags/eventTagsTree.tsx
+++ b/static/app/components/events/eventTags/eventTagsTree.tsx
@@ -100,7 +100,7 @@ function getTagTreeRows({
const branchRows = getTagTreeRows({
...props,
tagKey: tag,
- content: content.subtree[tag],
+ content: content.subtree[tag]!,
spacerCount: spacerCount + 1,
isLast: i === subtreeTags.length - 1,
// Encoding the trunk index with the branch index ensures uniqueness for the key
@@ -154,7 +154,7 @@ function TagTreeColumns({
// root parent so that we do not split up roots/branches when forming columns
const tagTreeRowGroups: React.ReactNode[][] = Object.entries(tagTree).map(
([tagKey, content], i) =>
- getTagTreeRows({tagKey, content, uniqueKey: `${i}`, project: project, ...props})
+ getTagTreeRows({tagKey, content, uniqueKey: `${i}`, project, ...props})
);
// Get the total number of TagTreeRow components to be rendered, and a goal size for each column
const tagTreeRowTotal = tagTreeRowGroups.reduce(
diff --git a/static/app/components/events/eventTagsAndScreenshot/index.spec.tsx b/static/app/components/events/eventTagsAndScreenshot/index.spec.tsx
index 1b15b8a38b8af8..e33c346f48b647 100644
--- a/static/app/components/events/eventTagsAndScreenshot/index.spec.tsx
+++ b/static/app/components/events/eventTagsAndScreenshot/index.spec.tsx
@@ -221,7 +221,7 @@ describe('EventTagsAndScreenshot', function () {
const allTags = applicationTags.concat(customTags);
const testEvent = EventFixture({tags: allTags});
render( , {
- organization: organization,
+ organization,
});
expect(mockDetailedProject).toHaveBeenCalled();
expect(await screen.findByTestId('loading-indicator')).not.toBeInTheDocument();
@@ -260,7 +260,7 @@ describe('EventTagsAndScreenshot', function () {
];
const testEvent = EventFixture({tags: applicationTags});
render( , {
- organization: organization,
+ organization,
});
expect(mockDetailedProject).toHaveBeenCalled();
expect(await screen.findByTestId('loading-indicator')).not.toBeInTheDocument();
@@ -311,7 +311,7 @@ describe('EventTagsAndScreenshot', function () {
expect(screen.getByText('View screenshot')).toBeInTheDocument();
expect(screen.getByTestId('image-viewer')).toHaveAttribute(
'src',
- `/api/0/projects/${organization.slug}/${project.slug}/events/${event.id}/attachments/${attachments[1].id}/?download`
+ `/api/0/projects/${organization.slug}/${project.slug}/events/${event.id}/attachments/${attachments[1]!.id}/?download`
);
// Display help text when hovering question element
@@ -354,7 +354,7 @@ describe('EventTagsAndScreenshot', function () {
expect(await screen.findByText('View screenshot')).toBeInTheDocument();
expect(screen.getByTestId('image-viewer')).toHaveAttribute(
'src',
- `/api/0/projects/${organization.slug}/${project.slug}/events/${event.id}/attachments/${attachments[1].id}/?download`
+ `/api/0/projects/${organization.slug}/${project.slug}/events/${event.id}/attachments/${attachments[1]!.id}/?download`
);
expect(screen.getByTestId('screenshot-data-section')?.textContent).toContain(
@@ -403,7 +403,7 @@ describe('EventTagsAndScreenshot', function () {
expect(screen.getByText('View screenshot')).toBeInTheDocument();
expect(screen.getByTestId('image-viewer')).toHaveAttribute(
'src',
- `/api/0/projects/${organization.slug}/${project.slug}/events/${event.id}/attachments/${moreAttachments[1].id}/?download`
+ `/api/0/projects/${organization.slug}/${project.slug}/events/${event.id}/attachments/${moreAttachments[1]!.id}/?download`
);
await userEvent.click(screen.getByRole('button', {name: 'Next Screenshot'}));
@@ -415,7 +415,7 @@ describe('EventTagsAndScreenshot', function () {
expect(screen.getByText('View screenshot')).toBeInTheDocument();
expect(screen.getByTestId('image-viewer')).toHaveAttribute(
'src',
- `/api/0/projects/${organization.slug}/${project.slug}/events/${event.id}/attachments/${moreAttachments[2].id}/?download`
+ `/api/0/projects/${organization.slug}/${project.slug}/events/${event.id}/attachments/${moreAttachments[2]!.id}/?download`
);
});
@@ -473,7 +473,7 @@ describe('EventTagsAndScreenshot', function () {
expect(screen.getByText('View screenshot')).toBeInTheDocument();
expect(screen.getByTestId('image-viewer')).toHaveAttribute(
'src',
- `/api/0/projects/${organization.slug}/${project.slug}/events/${event.id}/attachments/${attachments[1].id}/?download`
+ `/api/0/projects/${organization.slug}/${project.slug}/events/${event.id}/attachments/${attachments[1]!.id}/?download`
);
});
});
diff --git a/static/app/components/events/eventTagsAndScreenshot/screenshot/modal.tsx b/static/app/components/events/eventTagsAndScreenshot/screenshot/modal.tsx
index 7fd8a8e250f2e9..a301859b702c01 100644
--- a/static/app/components/events/eventTagsAndScreenshot/screenshot/modal.tsx
+++ b/static/app/components/events/eventTagsAndScreenshot/screenshot/modal.tsx
@@ -65,7 +65,7 @@ export default function ScreenshotModal({
if (attachments.length) {
const newIndex = currentAttachmentIndex + delta;
if (newIndex >= 0 && newIndex < attachments.length) {
- setCurrentAttachment(attachments[newIndex]);
+ setCurrentAttachment(attachments[newIndex]!);
}
}
},
diff --git a/static/app/components/events/eventTagsAndScreenshot/screenshot/screenshotDataSection.tsx b/static/app/components/events/eventTagsAndScreenshot/screenshot/screenshotDataSection.tsx
index d19b2301cdfc56..6527ad69a9ff0f 100644
--- a/static/app/components/events/eventTagsAndScreenshot/screenshot/screenshotDataSection.tsx
+++ b/static/app/components/events/eventTagsAndScreenshot/screenshot/screenshotDataSection.tsx
@@ -63,7 +63,7 @@ export function ScreenshotDataSection({
const [screenshotInFocus, setScreenshotInFocus] = useState(0);
const showScreenshot = !isShare && !!screenshots.length;
- const screenshot = screenshots[screenshotInFocus];
+ const screenshot = screenshots[screenshotInFocus]!;
const handleDeleteScreenshot = (attachmentId: string) => {
deleteAttachment({
diff --git a/static/app/components/events/eventTagsAndScreenshot/screenshot/screenshotPagination.tsx b/static/app/components/events/eventTagsAndScreenshot/screenshot/screenshotPagination.tsx
index d5c1d2f43f40b5..07de19714552f4 100644
--- a/static/app/components/events/eventTagsAndScreenshot/screenshot/screenshotPagination.tsx
+++ b/static/app/components/events/eventTagsAndScreenshot/screenshot/screenshotPagination.tsx
@@ -1,4 +1,3 @@
-// eslint-disable-next-line no-restricted-imports
import type {ReactEventHandler} from 'react';
import styled from '@emotion/styled';
diff --git a/static/app/components/events/eventTagsAndScreenshot/tags.tsx b/static/app/components/events/eventTagsAndScreenshot/tags.tsx
index f419307fe51416..12fb8ba10c380c 100644
--- a/static/app/components/events/eventTagsAndScreenshot/tags.tsx
+++ b/static/app/components/events/eventTagsAndScreenshot/tags.tsx
@@ -56,9 +56,12 @@ export const EventTagsDataSection = forwardRef(
});
}, [event.tags]);
+ // Prevent drawer button from appearing on performance pages
+ const isOnIssueDetails = location.pathname.includes('/issues/');
+
const actions = (
- {hasStreamlinedUI && event.groupID && (
+ {hasStreamlinedUI && event.groupID && isOnIssueDetails && (
;
+ let mockProject!: ReturnType;
beforeEach(function () {
mockAttachment = EventAttachmentFixture({type: 'event.view_hierarchy'});
mockProject = ProjectFixture();
diff --git a/static/app/components/events/eventVitals.tsx b/static/app/components/events/eventVitals.tsx
index 05c335b22a1961..91b7501fec1849 100644
--- a/static/app/components/events/eventVitals.tsx
+++ b/static/app/components/events/eventVitals.tsx
@@ -119,7 +119,7 @@ interface EventVitalProps extends Props {
}
function EventVital({event, name, vital}: EventVitalProps) {
- const value = event.measurements?.[name].value ?? null;
+ const value = event.measurements?.[name]!.value ?? null;
if (value === null || !vital) {
return null;
}
diff --git a/static/app/components/events/featureFlags/eventFeatureFlagList.spec.tsx b/static/app/components/events/featureFlags/eventFeatureFlagList.spec.tsx
index 817f37e932878f..0e8c82b5ccd04f 100644
--- a/static/app/components/events/featureFlags/eventFeatureFlagList.spec.tsx
+++ b/static/app/components/events/featureFlags/eventFeatureFlagList.spec.tsx
@@ -176,8 +176,8 @@ describe('EventFeatureFlagList', function () {
// expect enableReplay to be preceding webVitalsFlag
expect(
screen
- .getByText(webVitalsFlag.flag)
- .compareDocumentPosition(screen.getByText(enableReplay.flag))
+ .getByText(webVitalsFlag!.flag)
+ .compareDocumentPosition(screen.getByText(enableReplay!.flag))
).toBe(document.DOCUMENT_POSITION_PRECEDING);
const sortControl = screen.getByRole('button', {
@@ -189,8 +189,8 @@ describe('EventFeatureFlagList', function () {
// expect enableReplay to be following webVitalsFlag
expect(
screen
- .getByText(webVitalsFlag.flag)
- .compareDocumentPosition(screen.getByText(enableReplay.flag))
+ .getByText(webVitalsFlag!.flag)
+ .compareDocumentPosition(screen.getByText(enableReplay!.flag))
).toBe(document.DOCUMENT_POSITION_FOLLOWING);
await userEvent.click(sortControl);
@@ -199,8 +199,8 @@ describe('EventFeatureFlagList', function () {
// expect enableReplay to be preceding webVitalsFlag, A-Z sort by default
expect(
screen
- .getByText(webVitalsFlag.flag)
- .compareDocumentPosition(screen.getByText(enableReplay.flag))
+ .getByText(webVitalsFlag!.flag)
+ .compareDocumentPosition(screen.getByText(enableReplay!.flag))
).toBe(document.DOCUMENT_POSITION_PRECEDING);
await userEvent.click(sortControl);
@@ -209,8 +209,8 @@ describe('EventFeatureFlagList', function () {
// expect enableReplay to be following webVitalsFlag
expect(
screen
- .getByText(webVitalsFlag.flag)
- .compareDocumentPosition(screen.getByText(enableReplay.flag))
+ .getByText(webVitalsFlag!.flag)
+ .compareDocumentPosition(screen.getByText(enableReplay!.flag))
).toBe(document.DOCUMENT_POSITION_FOLLOWING);
});
@@ -223,7 +223,7 @@ describe('EventFeatureFlagList', function () {
expect(search).not.toBeInTheDocument();
expect(screen.getByRole('button', {name: 'Set Up Integration'})).toBeInTheDocument();
expect(
- screen.queryByText('No feature flags were found for this event')
+ screen.getByText('No feature flags were found for this event')
).toBeInTheDocument();
});
diff --git a/static/app/components/events/featureFlags/eventFeatureFlagList.tsx b/static/app/components/events/featureFlags/eventFeatureFlagList.tsx
index 550a4d2b3e8ca7..1e9380f6131a91 100644
--- a/static/app/components/events/featureFlags/eventFeatureFlagList.tsx
+++ b/static/app/components/events/featureFlags/eventFeatureFlagList.tsx
@@ -24,7 +24,7 @@ import {featureFlagOnboardingPlatforms} from 'sentry/data/platformCategories';
import {IconMegaphone, IconSearch} from 'sentry/icons';
import {t} from 'sentry/locale';
import type {Event, FeatureFlag} from 'sentry/types/event';
-import type {Group} from 'sentry/types/group';
+import {type Group, IssueCategory} from 'sentry/types/group';
import type {Project} from 'sentry/types/project';
import {trackAnalytics} from 'sentry/utils/analytics';
import {useFeedbackForm} from 'sentry/utils/useFeedbackForm';
@@ -197,6 +197,10 @@ export function EventFeatureFlagList({
}
}, [hasFlags, hydratedFlags.length, organization]);
+ if (group.issueCategory !== IssueCategory.ERROR) {
+ return null;
+ }
+
if (showCTA) {
return ;
}
diff --git a/static/app/components/events/featureFlags/featureFlagDrawer.spec.tsx b/static/app/components/events/featureFlags/featureFlagDrawer.spec.tsx
index d7f38d6317839c..cff3ed55b90c33 100644
--- a/static/app/components/events/featureFlags/featureFlagDrawer.spec.tsx
+++ b/static/app/components/events/featureFlags/featureFlagDrawer.spec.tsx
@@ -25,7 +25,7 @@ async function renderFlagDrawer() {
}));
render( );
await userEvent.click(screen.getByRole('button', {name: 'View All'}));
- return within(screen.getByRole('complementary', {name: 'Feature flags drawer'}));
+ return screen.getByRole('complementary', {name: 'Feature flags drawer'});
}
describe('FeatureFlagDrawer', function () {
@@ -45,25 +45,33 @@ describe('FeatureFlagDrawer', function () {
});
it('renders the drawer as expected', async function () {
const drawerScreen = await renderFlagDrawer();
- expect(drawerScreen.getByRole('button', {name: 'Close Drawer'})).toBeInTheDocument();
+ expect(
+ within(drawerScreen).getByRole('button', {name: 'Close Drawer'})
+ ).toBeInTheDocument();
// Inner drawer flags
const {event, group} = MOCK_DATA_SECTION_PROPS;
- expect(drawerScreen.getByText(group.shortId)).toBeInTheDocument();
- expect(drawerScreen.getByText(event.id.slice(0, 8))).toBeInTheDocument();
+ expect(within(drawerScreen).getByText(group.shortId)).toBeInTheDocument();
+ expect(within(drawerScreen).getByText(event.id.slice(0, 8))).toBeInTheDocument();
expect(
- drawerScreen.getByText('Feature Flags', {selector: 'span'})
+ within(drawerScreen).getByText('Feature Flags', {selector: 'span'})
).toBeInTheDocument();
// Header & Controls
- expect(drawerScreen.getByText('Feature Flags', {selector: 'h3'})).toBeInTheDocument();
- expect(drawerScreen.getByRole('textbox', {name: 'Search Flags'})).toBeInTheDocument();
- expect(drawerScreen.getByRole('button', {name: 'Sort Flags'})).toBeInTheDocument();
+ expect(
+ within(drawerScreen).getByText('Feature Flags', {selector: 'h3'})
+ ).toBeInTheDocument();
+ expect(
+ within(drawerScreen).getByRole('textbox', {name: 'Search Flags'})
+ ).toBeInTheDocument();
+ expect(
+ within(drawerScreen).getByRole('button', {name: 'Sort Flags'})
+ ).toBeInTheDocument();
// Contents
for (const {flag, result} of MOCK_FLAGS) {
- expect(drawerScreen.getByText(flag)).toBeInTheDocument();
- expect(drawerScreen.getAllByText(result.toString())[0]).toBeInTheDocument();
+ expect(within(drawerScreen).getByText(flag)).toBeInTheDocument();
+ expect(within(drawerScreen).getAllByText(result.toString())[0]).toBeInTheDocument();
}
});
@@ -71,16 +79,16 @@ describe('FeatureFlagDrawer', function () {
const drawerScreen = await renderFlagDrawer();
const [webVitalsFlag, enableReplay] = MOCK_FLAGS.filter(f => f.result === true);
- expect(drawerScreen.getByText(webVitalsFlag.flag)).toBeInTheDocument();
- expect(drawerScreen.getByText(enableReplay.flag)).toBeInTheDocument();
+ expect(within(drawerScreen).getByText(webVitalsFlag!.flag)).toBeInTheDocument();
+ expect(within(drawerScreen).getByText(enableReplay!.flag)).toBeInTheDocument();
- const searchInput = drawerScreen.getByRole('textbox', {
+ const searchInput = within(drawerScreen).getByRole('textbox', {
name: 'Search Flags',
});
- await userEvent.type(searchInput, webVitalsFlag.flag);
+ await userEvent.type(searchInput, webVitalsFlag!.flag);
- expect(drawerScreen.getByText(webVitalsFlag.flag)).toBeInTheDocument();
- expect(drawerScreen.queryByText(enableReplay.flag)).not.toBeInTheDocument();
+ expect(within(drawerScreen).getByText(webVitalsFlag!.flag)).toBeInTheDocument();
+ expect(within(drawerScreen).queryByText(enableReplay!.flag)).not.toBeInTheDocument();
});
it('allows sort dropdown to affect displayed flags', async function () {
@@ -90,62 +98,69 @@ describe('FeatureFlagDrawer', function () {
// the flags are reversed by default, so webVitalsFlag should be following enableReplay
expect(
- drawerScreen
- .getByText(enableReplay.flag)
- .compareDocumentPosition(drawerScreen.getByText(webVitalsFlag.flag))
+ within(drawerScreen)
+ .getByText(enableReplay!.flag)
+ .compareDocumentPosition(within(drawerScreen).getByText(webVitalsFlag!.flag))
).toBe(document.DOCUMENT_POSITION_FOLLOWING);
- const sortControl = drawerScreen.getByRole('button', {
+ const sortControl = within(drawerScreen).getByRole('button', {
name: 'Sort Flags',
});
await userEvent.click(sortControl);
- await userEvent.click(drawerScreen.getByRole('option', {name: 'Oldest First'}));
+ await userEvent.click(
+ within(drawerScreen).getByRole('option', {name: 'Oldest First'})
+ );
// expect webVitalsFlag to be preceding enableReplay
expect(
- drawerScreen
- .getByText(enableReplay.flag)
- .compareDocumentPosition(drawerScreen.getByText(webVitalsFlag.flag))
+ within(drawerScreen)
+ .getByText(enableReplay!.flag)
+ .compareDocumentPosition(within(drawerScreen).getByText(webVitalsFlag!.flag))
).toBe(document.DOCUMENT_POSITION_PRECEDING);
await userEvent.click(sortControl);
- await userEvent.click(drawerScreen.getByRole('option', {name: 'Alphabetical'}));
+ await userEvent.click(
+ within(drawerScreen).getByRole('option', {name: 'Alphabetical'})
+ );
await userEvent.click(sortControl);
- await userEvent.click(drawerScreen.getByRole('option', {name: 'Z-A'}));
+ await userEvent.click(within(drawerScreen).getByRole('option', {name: 'Z-A'}));
// enableReplay follows webVitalsFlag in Z-A sort
expect(
- drawerScreen
- .getByText(webVitalsFlag.flag)
- .compareDocumentPosition(drawerScreen.getByText(enableReplay.flag))
+ within(drawerScreen)
+ .getByText(webVitalsFlag!.flag)
+ .compareDocumentPosition(within(drawerScreen).getByText(enableReplay!.flag))
).toBe(document.DOCUMENT_POSITION_FOLLOWING);
});
it('renders a sort dropdown with Evaluation Order as the default', async function () {
const drawerScreen = await renderFlagDrawer();
- const control = drawerScreen.getByRole('button', {name: 'Sort Flags'});
+ const control = within(drawerScreen).getByRole('button', {name: 'Sort Flags'});
expect(control).toBeInTheDocument();
await userEvent.click(control);
expect(
- drawerScreen.getByRole('option', {name: 'Evaluation Order'})
+ within(drawerScreen).getByRole('option', {name: 'Evaluation Order'})
+ ).toBeInTheDocument();
+ expect(
+ within(drawerScreen).getByRole('option', {name: 'Alphabetical'})
).toBeInTheDocument();
- expect(drawerScreen.getByRole('option', {name: 'Alphabetical'})).toBeInTheDocument();
});
it('renders a sort dropdown which affects the granular sort dropdown', async function () {
const drawerScreen = await renderFlagDrawer();
- const control = drawerScreen.getByRole('button', {name: 'Sort Flags'});
+ const control = within(drawerScreen).getByRole('button', {name: 'Sort Flags'});
expect(control).toBeInTheDocument();
await userEvent.click(control);
- await userEvent.click(drawerScreen.getByRole('option', {name: 'Alphabetical'}));
- await userEvent.click(control);
- expect(drawerScreen.getByRole('option', {name: 'Alphabetical'})).toHaveAttribute(
- 'aria-selected',
- 'true'
+ await userEvent.click(
+ within(drawerScreen).getByRole('option', {name: 'Alphabetical'})
);
- expect(drawerScreen.getByRole('option', {name: 'A-Z'})).toHaveAttribute(
+ await userEvent.click(control);
+ expect(
+ within(drawerScreen).getByRole('option', {name: 'Alphabetical'})
+ ).toHaveAttribute('aria-selected', 'true');
+ expect(within(drawerScreen).getByRole('option', {name: 'A-Z'})).toHaveAttribute(
'aria-selected',
'true'
);
@@ -154,35 +169,35 @@ describe('FeatureFlagDrawer', function () {
it('renders a sort dropdown which disables the appropriate options', async function () {
const drawerScreen = await renderFlagDrawer();
- const control = drawerScreen.getByRole('button', {name: 'Sort Flags'});
+ const control = within(drawerScreen).getByRole('button', {name: 'Sort Flags'});
expect(control).toBeInTheDocument();
await userEvent.click(control);
- await userEvent.click(drawerScreen.getByRole('option', {name: 'Alphabetical'}));
- await userEvent.click(control);
- expect(drawerScreen.getByRole('option', {name: 'Alphabetical'})).toHaveAttribute(
- 'aria-selected',
- 'true'
- );
- expect(drawerScreen.getByRole('option', {name: 'Newest First'})).toHaveAttribute(
- 'aria-disabled',
- 'true'
+ await userEvent.click(
+ within(drawerScreen).getByRole('option', {name: 'Alphabetical'})
);
- expect(drawerScreen.getByRole('option', {name: 'Oldest First'})).toHaveAttribute(
- 'aria-disabled',
- 'true'
- );
-
- await userEvent.click(drawerScreen.getByRole('option', {name: 'Evaluation Order'}));
await userEvent.click(control);
- expect(drawerScreen.getByRole('option', {name: 'Evaluation Order'})).toHaveAttribute(
- 'aria-selected',
- 'true'
+ expect(
+ within(drawerScreen).getByRole('option', {name: 'Alphabetical'})
+ ).toHaveAttribute('aria-selected', 'true');
+ expect(
+ within(drawerScreen).getByRole('option', {name: 'Newest First'})
+ ).toHaveAttribute('aria-disabled', 'true');
+ expect(
+ within(drawerScreen).getByRole('option', {name: 'Oldest First'})
+ ).toHaveAttribute('aria-disabled', 'true');
+
+ await userEvent.click(
+ within(drawerScreen).getByRole('option', {name: 'Evaluation Order'})
);
- expect(drawerScreen.getByRole('option', {name: 'Z-A'})).toHaveAttribute(
+ await userEvent.click(control);
+ expect(
+ within(drawerScreen).getByRole('option', {name: 'Evaluation Order'})
+ ).toHaveAttribute('aria-selected', 'true');
+ expect(within(drawerScreen).getByRole('option', {name: 'Z-A'})).toHaveAttribute(
'aria-disabled',
'true'
);
- expect(drawerScreen.getByRole('option', {name: 'A-Z'})).toHaveAttribute(
+ expect(within(drawerScreen).getByRole('option', {name: 'A-Z'})).toHaveAttribute(
'aria-disabled',
'true'
);
diff --git a/static/app/components/events/featureFlags/featureFlagOnboardingSidebar.tsx b/static/app/components/events/featureFlags/featureFlagOnboardingSidebar.tsx
index ea3a6dbd5551ad..8f1eca42a7dbc7 100644
--- a/static/app/components/events/featureFlags/featureFlagOnboardingSidebar.tsx
+++ b/static/app/components/events/featureFlags/featureFlagOnboardingSidebar.tsx
@@ -173,7 +173,7 @@ function OnboardingContent({
value: string;
label?: ReactNode;
textValue?: string;
- }>(openFeatureProviderOptions[0]);
+ }>(openFeatureProviderOptions[0]!);
// Second dropdown: other SDK providers
const sdkProviderOptions = sdkProviders.map(provider => {
@@ -188,7 +188,7 @@ function OnboardingContent({
value: string;
label?: ReactNode;
textValue?: string;
- }>(sdkProviderOptions[0]);
+ }>(sdkProviderOptions[0]!);
const defaultTab: string = 'openFeature';
const {getParamValue: setupMode, setParamValue: setSetupMode} = useUrlParams(
diff --git a/static/app/components/events/featureFlags/useIssueEvents.tsx b/static/app/components/events/featureFlags/useIssueEvents.tsx
index 11c7fd95a66f3a..b4196feaedb113 100644
--- a/static/app/components/events/featureFlags/useIssueEvents.tsx
+++ b/static/app/components/events/featureFlags/useIssueEvents.tsx
@@ -1,4 +1,4 @@
-import type {Event} from '@sentry/types';
+import type {Event} from '@sentry/core';
import {useApiQuery} from 'sentry/utils/queryClient';
import useOrganization from 'sentry/utils/useOrganization';
diff --git a/static/app/components/events/groupingInfo/groupingComponentStacktrace.tsx b/static/app/components/events/groupingInfo/groupingComponentStacktrace.tsx
index 427d5adb7bd275..3ff9cdeb0d61ca 100644
--- a/static/app/components/events/groupingInfo/groupingComponentStacktrace.tsx
+++ b/static/app/components/events/groupingInfo/groupingComponentStacktrace.tsx
@@ -1,5 +1,6 @@
import {Fragment} from 'react';
+import {isStacktraceNewestFirst} from 'sentry/components/events/interfaces/utils';
import type {EventGroupComponent} from 'sentry/types/event';
import GroupingComponent from './groupingComponent';
@@ -20,7 +21,10 @@ function GroupingComponentStacktrace({component, showNonContributing}: Props) {
const getFrameGroups = () => {
const frameGroups: FrameGroup[] = [];
- (component.values as EventGroupComponent[])
+ const frames = isStacktraceNewestFirst()
+ ? component.values.reverse()
+ : component.values;
+ (frames as EventGroupComponent[])
.filter(value => groupingComponentFilter(value, showNonContributing))
.forEach(value => {
const key = (value.values as EventGroupComponent[])
diff --git a/static/app/components/events/groupingInfo/groupingInfoSection.spec.tsx b/static/app/components/events/groupingInfo/groupingInfoSection.spec.tsx
index 7b29d7e8e30b77..3e9094272447cd 100644
--- a/static/app/components/events/groupingInfo/groupingInfoSection.spec.tsx
+++ b/static/app/components/events/groupingInfo/groupingInfoSection.spec.tsx
@@ -106,7 +106,7 @@ describe('EventGroupingInfo', function () {
},
});
- await userEvent.click(screen.getAllByRole('button', {name: 'default:XXXX'})[0]);
+ await userEvent.click(screen.getAllByRole('button', {name: 'default:XXXX'})[0]!);
await userEvent.click(screen.getByRole('option', {name: 'new:XXXX'}));
// Should show new hash
diff --git a/static/app/components/events/highlights/editHighlightsModal.spec.tsx b/static/app/components/events/highlights/editHighlightsModal.spec.tsx
index c1b6cbefe1a73c..4b6c7b874fadbe 100644
--- a/static/app/components/events/highlights/editHighlightsModal.spec.tsx
+++ b/static/app/components/events/highlights/editHighlightsModal.spec.tsx
@@ -14,14 +14,12 @@ import {openModal} from 'sentry/actionCreators/modal';
import EditHighlightsModal, {
type EditHighlightsModalProps,
} from 'sentry/components/events/highlights/editHighlightsModal';
-import {
- TEST_EVENT_CONTEXTS,
- TEST_EVENT_TAGS,
-} from 'sentry/components/events/highlights/util.spec';
import ModalStore from 'sentry/stores/modalStore';
import type {Project} from 'sentry/types/project';
import * as analytics from 'sentry/utils/analytics';
+import {TEST_EVENT_CONTEXTS, TEST_EVENT_TAGS} from './testUtils';
+
describe('EditHighlightsModal', function () {
const organization = OrganizationFixture();
const project = ProjectFixture();
@@ -154,7 +152,7 @@ describe('EditHighlightsModal', function () {
const previewCtxButtons = screen.queryAllByTestId('highlights-remove-ctx');
expect(previewCtxButtons).toHaveLength(highlightContextTitles.length);
- await userEvent.click(previewTagButtons[0]);
+ await userEvent.click(previewTagButtons[0]!);
expect(analyticsSpy).toHaveBeenCalledWith(
'highlights.edit_modal.remove_tag',
expect.anything()
@@ -163,7 +161,7 @@ describe('EditHighlightsModal', function () {
previewTagButtons.length - 1
);
- await userEvent.click(previewCtxButtons[0]);
+ await userEvent.click(previewCtxButtons[0]!);
expect(analyticsSpy).toHaveBeenCalledWith(
'highlights.edit_modal.remove_context_key',
expect.anything()
diff --git a/static/app/components/events/highlights/highlightsDataSection.spec.tsx b/static/app/components/events/highlights/highlightsDataSection.spec.tsx
index 54bfef402d4749..7240b8e0368e0a 100644
--- a/static/app/components/events/highlights/highlightsDataSection.spec.tsx
+++ b/static/app/components/events/highlights/highlightsDataSection.spec.tsx
@@ -7,13 +7,11 @@ import {render, screen, userEvent, within} from 'sentry-test/reactTestingLibrary
import * as modal from 'sentry/actionCreators/modal';
import HighlightsDataSection from 'sentry/components/events/highlights/highlightsDataSection';
import {EMPTY_HIGHLIGHT_DEFAULT} from 'sentry/components/events/highlights/util';
-import {
- TEST_EVENT_CONTEXTS,
- TEST_EVENT_TAGS,
-} from 'sentry/components/events/highlights/util.spec';
import ProjectsStore from 'sentry/stores/projectsStore';
import * as analytics from 'sentry/utils/analytics';
+import {TEST_EVENT_CONTEXTS, TEST_EVENT_TAGS} from './testUtils';
+
describe('HighlightsDataSection', function () {
const organization = OrganizationFixture();
const project = ProjectFixture();
@@ -21,7 +19,7 @@ describe('HighlightsDataSection', function () {
contexts: TEST_EVENT_CONTEXTS,
tags: TEST_EVENT_TAGS,
});
- const eventTagMap = TEST_EVENT_TAGS.reduce(
+ const eventTagMap = TEST_EVENT_TAGS.reduce>(
(tagMap, tag) => ({...tagMap, [tag.key]: tag.value}),
{}
);
@@ -100,7 +98,7 @@ describe('HighlightsDataSection', function () {
.closest('div[data-test-id=highlight-tag-row]') as HTMLElement;
// If highlight is present on the event...
if (eventTagMap.hasOwnProperty(tagKey)) {
- expect(within(row).getByText(eventTagMap[tagKey])).toBeInTheDocument();
+ expect(within(row).getByText(eventTagMap[tagKey]!)).toBeInTheDocument();
const highlightTagDropdown = within(row).getByLabelText('Tag Actions Menu');
expect(highlightTagDropdown).toBeInTheDocument();
await userEvent.click(highlightTagDropdown);
@@ -117,7 +115,7 @@ describe('HighlightsDataSection', function () {
}
const ctxRows = screen.queryAllByTestId('highlight-context-row');
- expect(ctxRows.length).toBe(Object.values(highlightContext).flat().length);
+ expect(ctxRows).toHaveLength(Object.values(highlightContext).flat().length);
highlightContextTitles.forEach(title => {
expect(screen.getByText(title)).toBeInTheDocument();
});
diff --git a/static/app/components/events/highlights/highlightsDataSection.tsx b/static/app/components/events/highlights/highlightsDataSection.tsx
index fd5439f38c0c11..524e96fd436faa 100644
--- a/static/app/components/events/highlights/highlightsDataSection.tsx
+++ b/static/app/components/events/highlights/highlightsDataSection.tsx
@@ -54,7 +54,7 @@ function useOpenEditHighlightsModal({
}) {
const organization = useOrganization();
const isProjectAdmin = hasEveryAccess(['project:admin'], {
- organization: organization,
+ organization,
project: detailedProject,
});
@@ -151,7 +151,7 @@ function HighlightsData({
// find the replayId from either context or tags, if it exists
const contextReplayItem = highlightContextDataItems.find(
- e => e.data.length && e.data[0].key === 'replay_id'
+ e => e.data.length && e.data[0]!.key === 'replay_id'
);
const contextReplayId = contextReplayItem?.value ?? EMPTY_HIGHLIGHT_DEFAULT;
diff --git a/static/app/components/events/highlights/highlightsIconSummary.spec.tsx b/static/app/components/events/highlights/highlightsIconSummary.spec.tsx
index c710d35ca96a06..cd285ffe66c8be 100644
--- a/static/app/components/events/highlights/highlightsIconSummary.spec.tsx
+++ b/static/app/components/events/highlights/highlightsIconSummary.spec.tsx
@@ -5,10 +5,8 @@ import {OrganizationFixture} from 'sentry-fixture/organization';
import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
import {HighlightsIconSummary} from 'sentry/components/events/highlights/highlightsIconSummary';
-import {
- TEST_EVENT_CONTEXTS,
- TEST_EVENT_TAGS,
-} from 'sentry/components/events/highlights/util.spec';
+
+import {TEST_EVENT_CONTEXTS, TEST_EVENT_TAGS} from './testUtils';
jest.mock('sentry/components/events/contexts/contextIcon', () => ({
...jest.requireActual('sentry/components/events/contexts/contextIcon'),
diff --git a/static/app/components/events/highlights/testUtils.tsx b/static/app/components/events/highlights/testUtils.tsx
new file mode 100644
index 00000000000000..d08dc448e9b411
--- /dev/null
+++ b/static/app/components/events/highlights/testUtils.tsx
@@ -0,0 +1,64 @@
+export const TEST_EVENT_CONTEXTS = {
+ keyboard: {
+ type: 'default',
+ brand: 'keychron',
+ percent: 75,
+ switches: {
+ form: 'tactile',
+ brand: 'wuque studios',
+ },
+ },
+ client_os: {
+ type: 'os',
+ name: 'Mac OS X',
+ version: '10.15',
+ },
+ runtime: {
+ type: 'runtime',
+ name: 'CPython',
+ version: '3.8.13',
+ },
+};
+
+export const TEST_EVENT_TAGS = [
+ {
+ key: 'browser',
+ value: 'Chrome 1.2.3',
+ },
+ {
+ key: 'browser.name',
+ value: 'Chrome',
+ },
+ {
+ key: 'device.family',
+ value: 'Mac',
+ },
+ {
+ key: 'environment',
+ value: 'production',
+ },
+ {
+ key: 'handled',
+ value: 'no',
+ },
+ {
+ key: 'level',
+ value: 'error',
+ },
+ {
+ key: 'release',
+ value: '1.8',
+ },
+ {
+ key: 'runtime',
+ value: 'CPython 3.8.13',
+ },
+ {
+ key: 'runtime.name',
+ value: 'CPython',
+ },
+ {
+ key: 'url',
+ value: 'https://example.com',
+ },
+];
diff --git a/static/app/components/events/highlights/util.spec.tsx b/static/app/components/events/highlights/util.spec.tsx
index e802cbb80ade5d..0ce2af90019dd6 100644
--- a/static/app/components/events/highlights/util.spec.tsx
+++ b/static/app/components/events/highlights/util.spec.tsx
@@ -9,70 +9,7 @@ import {
getHighlightTagData,
} from 'sentry/components/events/highlights/util';
-export const TEST_EVENT_CONTEXTS = {
- keyboard: {
- type: 'default',
- brand: 'keychron',
- percent: 75,
- switches: {
- form: 'tactile',
- brand: 'wuque studios',
- },
- },
- client_os: {
- type: 'os',
- name: 'Mac OS X',
- version: '10.15',
- },
- runtime: {
- type: 'runtime',
- name: 'CPython',
- version: '3.8.13',
- },
-};
-
-export const TEST_EVENT_TAGS = [
- {
- key: 'browser',
- value: 'Chrome 1.2.3',
- },
- {
- key: 'browser.name',
- value: 'Chrome',
- },
- {
- key: 'device.family',
- value: 'Mac',
- },
- {
- key: 'environment',
- value: 'production',
- },
- {
- key: 'handled',
- value: 'no',
- },
- {
- key: 'level',
- value: 'error',
- },
- {
- key: 'release',
- value: '1.8',
- },
- {
- key: 'runtime',
- value: 'CPython 3.8.13',
- },
- {
- key: 'runtime.name',
- value: 'CPython',
- },
- {
- key: 'url',
- value: 'https://example.com',
- },
-];
+import {TEST_EVENT_CONTEXTS, TEST_EVENT_TAGS} from './testUtils';
describe('getHighlightContextData', function () {
it('returns only highlight context data', function () {
@@ -92,14 +29,14 @@ describe('getHighlightContextData', function () {
location: {query: {}} as Location,
});
expect(highlightCtxData).toHaveLength(1);
- expect(highlightCtxData[0].alias).toBe('keyboard');
- expect(highlightCtxData[0].type).toBe('default');
- expect(highlightCtxData[0].data).toHaveLength(highlightContext.keyboard.length);
- const highlightCtxDataKeys = new Set(highlightCtxData[0].data.map(({key}) => key));
+ expect(highlightCtxData[0]!.alias).toBe('keyboard');
+ expect(highlightCtxData[0]!.type).toBe('default');
+ expect(highlightCtxData[0]!.data).toHaveLength(highlightContext.keyboard.length);
+ const highlightCtxDataKeys = new Set(highlightCtxData[0]!.data.map(({key}) => key));
for (const ctxKey of highlightContext.keyboard) {
expect(highlightCtxDataKeys.has(ctxKey)).toBe(true);
}
- const missingCtxHighlightFromEvent = highlightCtxData[0].data?.find(
+ const missingCtxHighlightFromEvent = highlightCtxData[0]!.data?.find(
d => d.key === missingContextKey
);
expect(missingCtxHighlightFromEvent?.value).toBe(EMPTY_HIGHLIGHT_DEFAULT);
@@ -122,7 +59,7 @@ describe('getHighlightContextData', function () {
location: {query: {}} as Location,
});
expect(highlightCtxData).toHaveLength(1);
- expect(highlightCtxData[0].type).toBe('os');
+ expect(highlightCtxData[0]!.type).toBe('os');
});
});
diff --git a/static/app/components/events/highlights/util.tsx b/static/app/components/events/highlights/util.tsx
index 233ba463aeb5bc..f952127f7869d5 100644
--- a/static/app/components/events/highlights/util.tsx
+++ b/static/app/components/events/highlights/util.tsx
@@ -61,7 +61,7 @@ function getFuzzyHighlightContext(
};
}
- const highlightContextKeys = highlightContextSets[highlightKey];
+ const highlightContextKeys = highlightContextSets[highlightKey]!;
const highlightItems: KeyValueListData = data.filter(
({key, subject}) =>
// We match on key (e.g. 'trace_id') and subject (e.g. 'Trace ID')
diff --git a/static/app/components/events/interfaces/analyzeFrames.spec.tsx b/static/app/components/events/interfaces/analyzeFrames.spec.tsx
index f8395008fcd16e..6cecd73259016f 100644
--- a/static/app/components/events/interfaces/analyzeFrames.spec.tsx
+++ b/static/app/components/events/interfaces/analyzeFrames.spec.tsx
@@ -139,10 +139,10 @@ describe('analyzeAnrFrames', function () {
},
]);
const rootCause = analyzeFramesForRootCause(event);
- expect(rootCause?.resources).toEqual(
+ expect(rootCause?.resources).toBe(
'SharedPreferences.apply will save data on background thread only if it happens before the activity/service finishes. Switch to SharedPreferences.commit and move commit to a background thread.'
);
- expect(rootCause?.culprit).toEqual(
+ expect(rootCause?.culprit).toBe(
'/^android\\.app\\.SharedPreferencesImpl\\$EditorImpl\\$[0-9]/'
);
});
@@ -169,10 +169,10 @@ describe('analyzeAnrFrames', function () {
},
]);
const rootCause = analyzeFramesForRootCause(event);
- expect(rootCause?.resources).toEqual(
+ expect(rootCause?.resources).toBe(
'Database operations, such as querying, inserting, updating, or deleting data, can involve disk I/O, processing, and potentially long-running operations. Move database operations off the main thread to avoid this ANR.'
);
- expect(rootCause?.culprit).toEqual('android.database.sqlite.SQLiteConnection');
+ expect(rootCause?.culprit).toBe('android.database.sqlite.SQLiteConnection');
});
it('picks anr root cause of the topmost frame', function () {
@@ -215,10 +215,10 @@ describe('analyzeAnrFrames', function () {
},
]);
const rootCause = analyzeFramesForRootCause(event);
- expect(rootCause?.resources).toEqual(
+ expect(rootCause?.resources).toBe(
'SharedPreferences.apply will save data on background thread only if it happens before the activity/service finishes. Switch to SharedPreferences.commit and move commit to a background thread.'
);
- expect(rootCause?.culprit).toEqual(
+ expect(rootCause?.culprit).toBe(
'/^android\\.app\\.SharedPreferencesImpl\\$EditorImpl\\$[0-9]/'
);
});
diff --git a/static/app/components/events/interfaces/analyzeFrames.tsx b/static/app/components/events/interfaces/analyzeFrames.tsx
index 60d6a2eb90e337..bc784f0b76b79e 100644
--- a/static/app/components/events/interfaces/analyzeFrames.tsx
+++ b/static/app/components/events/interfaces/analyzeFrames.tsx
@@ -171,7 +171,7 @@ function satisfiesFunctionCondition(frame: Frame, suspect: SuspectFrame) {
return false;
}
for (let index = 0; index < suspect.functions.length; index++) {
- const matchFuction = suspect.functions[index];
+ const matchFuction = suspect.functions[index]!;
const match =
typeof matchFuction === 'string'
? frame.function === matchFuction
@@ -219,7 +219,7 @@ export function analyzeFramesForRootCause(event: Event): {
// iterating the frames in reverse order, because the topmost frames most like the root cause
for (let index = exceptionFrames.length - 1; index >= 0; index--) {
- const frame = exceptionFrames[index];
+ const frame = exceptionFrames[index]!;
const rootCause = analyzeFrameForRootCause(frame, currentThread);
if (defined(rootCause)) {
return rootCause;
diff --git a/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.spec.tsx b/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.spec.tsx
index 27a6ac62ea07e7..17046a88659e67 100644
--- a/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.spec.tsx
+++ b/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.spec.tsx
@@ -10,7 +10,7 @@ import {BreadcrumbLevelType, BreadcrumbType} from 'sentry/types/breadcrumbs';
import useProjects from 'sentry/utils/useProjects';
jest.mock('sentry/utils/replays/hooks/useReplayOnboarding');
-jest.mock('sentry/utils/replays/hooks/useReplayReader');
+jest.mock('sentry/utils/replays/hooks/useLoadReplayReader');
jest.mock('sentry/utils/useProjects');
describe('Breadcrumbs', () => {
diff --git a/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.tsx b/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.tsx
index 516b532fdf0e17..4945d94132178c 100644
--- a/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.tsx
+++ b/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.tsx
@@ -67,15 +67,15 @@ function renderBreadCrumbRow({index, key, parent, style}: RenderBreadCrumbRowPro
>
f.value === `type-${breadcrumb.type}`
+ f => f.value === `type-${breadcrumb!.type}`
);
if (foundFilterType === -1) {
filterTypes.push({
- value: `type-${breadcrumb.type}`,
- leadingItems: ,
- label: breadcrumb.description,
- levels: breadcrumb?.level ? [breadcrumb.level] : [],
+ value: `type-${breadcrumb!.type}`,
+ leadingItems: ,
+ label: breadcrumb!.description,
+ levels: breadcrumb!.level ? [breadcrumb!.level] : [],
});
continue;
}
if (
breadcrumb?.level &&
- !filterTypes[foundFilterType].levels?.includes(breadcrumb.level)
+ !filterTypes[foundFilterType]!.levels?.includes(breadcrumb.level)
) {
- filterTypes[foundFilterType].levels?.push(breadcrumb.level);
+ filterTypes[foundFilterType]!.levels?.push(breadcrumb!.level);
}
}
@@ -176,8 +176,8 @@ function BreadcrumbsContainer({data, event, organization, hideTitle = false}: Pr
const filterLevels: SelectOption[] = [];
for (const indexType in types) {
- for (const indexLevel in types[indexType].levels) {
- const level = types[indexType].levels?.[indexLevel];
+ for (const indexLevel in types[indexType]!.levels) {
+ const level = types[indexType]!.levels?.[indexLevel];
if (filterLevels.some(f => f.value === `level-${level}`)) {
continue;
diff --git a/static/app/components/events/interfaces/crashContent/exception/actionableItems.tsx b/static/app/components/events/interfaces/crashContent/exception/actionableItems.tsx
index 0b27a0292ced12..586eee6fd10fa1 100644
--- a/static/app/components/events/interfaces/crashContent/exception/actionableItems.tsx
+++ b/static/app/components/events/interfaces/crashContent/exception/actionableItems.tsx
@@ -246,7 +246,7 @@ interface ExpandableErrorListProps {
function ExpandableErrorList({handleExpandClick, errorList}: ExpandableErrorListProps) {
const [expanded, setExpanded] = useState(false);
- const firstError = errorList[0];
+ const firstError = errorList[0]!;
const {title, desc, type} = firstError;
const numErrors = errorList.length;
const errorDataList = errorList.map(error => error.data ?? {});
@@ -291,7 +291,6 @@ function ExpandableErrorList({handleExpandClick, errorList}: ExpandableErrorList
});
});
return cleaned;
- // eslint-disable-next-line react-hooks/exhaustive-deps
}, [errorDataList]);
return (
diff --git a/static/app/components/events/interfaces/crashContent/exception/content.spec.tsx b/static/app/components/events/interfaces/crashContent/exception/content.spec.tsx
index f4fc56d43c2b0e..3819a48b72a534 100644
--- a/static/app/components/events/interfaces/crashContent/exception/content.spec.tsx
+++ b/static/app/components/events/interfaces/crashContent/exception/content.spec.tsx
@@ -136,7 +136,7 @@ describe('Exception Content', function () {
newestFirst
stackView={StackView.APP}
event={event}
- values={event.entries[0].data.values}
+ values={event.entries[0]!.data.values}
meta={event._meta!.entries[0].data.values}
projectSlug={project.slug}
/>,
@@ -145,7 +145,7 @@ describe('Exception Content', function () {
expect(screen.getAllByText(/redacted/)).toHaveLength(2);
- await userEvent.hover(screen.getAllByText(/redacted/)[0]);
+ await userEvent.hover(screen.getAllByText(/redacted/)[0]!);
expect(
await screen.findByText(
@@ -200,7 +200,7 @@ describe('Exception Content', function () {
type={StackType.ORIGINAL}
stackView={StackView.APP}
event={event}
- values={event.entries[0].data.values}
+ values={event.entries[0]!.data.values}
projectSlug={project.slug}
/>
);
@@ -242,7 +242,7 @@ describe('Exception Content', function () {
platform: 'python' as const,
stackView: StackView.APP,
event,
- values: event.entries[0].data.values,
+ values: event.entries[0]!.data.values,
projectSlug: project.slug,
};
@@ -252,9 +252,9 @@ describe('Exception Content', function () {
const exceptions = screen.getAllByTestId('exception-value');
// First exception should be the parent ExceptionGroup
- expect(within(exceptions[0]).getByText('ExceptionGroup 1')).toBeInTheDocument();
+ expect(within(exceptions[0]!).getByText('ExceptionGroup 1')).toBeInTheDocument();
expect(
- within(exceptions[0]).getByRole('heading', {name: 'ExceptionGroup 1'})
+ within(exceptions[0]!).getByRole('heading', {name: 'ExceptionGroup 1'})
).toBeInTheDocument();
});
@@ -263,7 +263,7 @@ describe('Exception Content', function () {
const exceptions = screen.getAllByTestId('exception-value');
- const exceptionGroupWithNoContext = exceptions[2];
+ const exceptionGroupWithNoContext = exceptions[2]!;
expect(
within(exceptionGroupWithNoContext).getByText('Related Exceptions')
).toBeInTheDocument();
@@ -273,7 +273,7 @@ describe('Exception Content', function () {
render( );
// There are 4 values, but 1 should be hidden
- expect(screen.getAllByTestId('exception-value').length).toBe(3);
+ expect(screen.getAllByTestId('exception-value')).toHaveLength(3);
expect(screen.queryByRole('heading', {name: 'ValueError'})).not.toBeInTheDocument();
await userEvent.click(
@@ -281,7 +281,7 @@ describe('Exception Content', function () {
);
// After expanding, ValueError should be visible
- expect(screen.getAllByTestId('exception-value').length).toBe(4);
+ expect(screen.getAllByTestId('exception-value')).toHaveLength(4);
expect(screen.getByRole('heading', {name: 'ValueError'})).toBeInTheDocument();
await userEvent.click(
@@ -289,7 +289,7 @@ describe('Exception Content', function () {
);
// After collapsing, ValueError should be gone again
- expect(screen.getAllByTestId('exception-value').length).toBe(3);
+ expect(screen.getAllByTestId('exception-value')).toHaveLength(3);
expect(screen.queryByRole('heading', {name: 'ValueError'})).not.toBeInTheDocument();
});
diff --git a/static/app/components/events/interfaces/crashContent/exception/content.tsx b/static/app/components/events/interfaces/crashContent/exception/content.tsx
index 7ba4654d88c7bf..b98f29252e5517 100644
--- a/static/app/components/events/interfaces/crashContent/exception/content.tsx
+++ b/static/app/components/events/interfaces/crashContent/exception/content.tsx
@@ -151,7 +151,7 @@ export function Content({
const frameSourceMapDebuggerData = sourceMapDebuggerData?.exceptions[
excIdx
- ].frames.map(debuggerFrame =>
+ ]!.frames.map(debuggerFrame =>
prepareSourceMapDebuggerFrameInformation(
sourceMapDebuggerData,
debuggerFrame,
diff --git a/static/app/components/events/interfaces/crashContent/exception/relatedExceptions.spec.tsx b/static/app/components/events/interfaces/crashContent/exception/relatedExceptions.spec.tsx
index 1f261acefc8570..986fb9fc2a042e 100644
--- a/static/app/components/events/interfaces/crashContent/exception/relatedExceptions.spec.tsx
+++ b/static/app/components/events/interfaces/crashContent/exception/relatedExceptions.spec.tsx
@@ -31,14 +31,14 @@ describe('ExceptionGroupContext', function () {
expect(items).toHaveLength(3);
// ExceptionGroup should not link to itself
- expect(within(items[0]).getByText('ExceptionGroup 1: parent')).toBeInTheDocument();
+ expect(within(items[0]!).getByText('ExceptionGroup 1: parent')).toBeInTheDocument();
// Should have a link to TypeError exception
expect(
- within(items[1]).getByRole('button', {name: 'TypeError: nested'})
+ within(items[1]!).getByRole('button', {name: 'TypeError: nested'})
).toBeInTheDocument();
// Should have a link to child exception group
expect(
- within(items[2]).getByRole('button', {name: 'ExceptionGroup 2: child'})
+ within(items[2]!).getByRole('button', {name: 'ExceptionGroup 2: child'})
).toBeInTheDocument();
});
@@ -54,8 +54,8 @@ describe('ExceptionGroupContext', function () {
const children = screen.getAllByRole('button');
 // Order should be oldest to newest, opposite of the previous test
- expect(within(children[0]).getByText(/ExceptionGroup 2/i)).toBeInTheDocument();
- expect(within(children[1]).getByText(/TypeError/i)).toBeInTheDocument();
+ expect(within(children[0]!).getByText(/ExceptionGroup 2/i)).toBeInTheDocument();
+ expect(within(children[1]!).getByText(/TypeError/i)).toBeInTheDocument();
});
it('renders tree with child exception group', function () {
@@ -66,13 +66,13 @@ describe('ExceptionGroupContext', function () {
// Should show and link to parent exception group
expect(
- within(items[0]).getByRole('button', {name: 'ExceptionGroup 1: parent'})
+ within(items[0]!).getByRole('button', {name: 'ExceptionGroup 1: parent'})
).toBeInTheDocument();
// Should have a link to child exception group
- expect(within(items[1]).getByText('ExceptionGroup 2: child')).toBeInTheDocument();
+ expect(within(items[1]!).getByText('ExceptionGroup 2: child')).toBeInTheDocument();
 // Should show and link to child exception
expect(
- within(items[2]).getByRole('button', {name: 'ValueError: test'})
+ within(items[2]!).getByRole('button', {name: 'ValueError: test'})
).toBeInTheDocument();
});
diff --git a/static/app/components/events/interfaces/crashContent/exception/sourceMapDebug.spec.tsx b/static/app/components/events/interfaces/crashContent/exception/sourceMapDebug.spec.tsx
index 7b2e52c3e70652..b9c6c3a3cd5bad 100644
--- a/static/app/components/events/interfaces/crashContent/exception/sourceMapDebug.spec.tsx
+++ b/static/app/components/events/interfaces/crashContent/exception/sourceMapDebug.spec.tsx
@@ -76,7 +76,7 @@ describe('SourceMapDebug', () => {
it('should use unqiue in app frames', () => {
expect(debugFrames).toHaveLength(1);
- expect(debugFrames[0].filename).toBe(
+ expect(debugFrames[0]!.filename).toBe(
'./app/views/organizationStats/teamInsights/controls.tsx'
);
});
diff --git a/static/app/components/events/interfaces/crashContent/exception/stackTrace.spec.tsx b/static/app/components/events/interfaces/crashContent/exception/stackTrace.spec.tsx
index 089dc288dc3362..124ff840ccc95a 100644
--- a/static/app/components/events/interfaces/crashContent/exception/stackTrace.spec.tsx
+++ b/static/app/components/events/interfaces/crashContent/exception/stackTrace.spec.tsx
@@ -121,7 +121,7 @@ describe('ExceptionStacktraceContent', function () {
render(
);
expect(
@@ -143,7 +143,7 @@ describe('ExceptionStacktraceContent', function () {
render(
);
@@ -153,9 +153,9 @@ describe('ExceptionStacktraceContent', function () {
expect(screen.getAllByRole('listitem')).toHaveLength(2);
// inApp === true
- expect(screen.getAllByRole('listitem')[1]).toHaveTextContent(frames[0].filename);
+ expect(screen.getAllByRole('listitem')[1]).toHaveTextContent(frames[0]!.filename);
// inApp === false
- expect(screen.getAllByRole('listitem')[0]).toHaveTextContent(frames[1].filename);
+ expect(screen.getAllByRole('listitem')[0]).toHaveTextContent(frames[1]!.filename);
});
});
diff --git a/static/app/components/events/interfaces/crashContent/exception/useSourceMapDebug.spec.tsx b/static/app/components/events/interfaces/crashContent/exception/useSourceMapDebug.spec.tsx
index c8177decf18622..952fcd60e859a6 100644
--- a/static/app/components/events/interfaces/crashContent/exception/useSourceMapDebug.spec.tsx
+++ b/static/app/components/events/interfaces/crashContent/exception/useSourceMapDebug.spec.tsx
@@ -7,8 +7,8 @@ import {getUniqueFilesFromException} from './useSourceMapDebug';
function modifyEventFrames(event: Event, modify: any): Event {
const modifiedEvent = cloneDeep(event);
- modifiedEvent.entries[0].data.values[0].stacktrace.frames =
- event.entries[0].data.values[0].stacktrace.frames.map(frame => ({
+ modifiedEvent.entries[0]!.data.values[0].stacktrace.frames =
+ event.entries[0]!.data.values[0].stacktrace.frames.map(frame => ({
...frame,
...modify,
}));
@@ -23,7 +23,7 @@ describe('getUniqueFilesFromException', () => {
platform: 'javascript',
});
const result = getUniqueFilesFromException(
- (event.entries as EntryException[])[0].data.values!,
+ (event.entries as EntryException[])[0]!.data.values!,
props
);
@@ -48,7 +48,7 @@ describe('getUniqueFilesFromException', () => {
{filename: ''}
);
const result = getUniqueFilesFromException(
- (event.entries as EntryException[])[0].data.values!,
+ (event.entries as EntryException[])[0]!.data.values!,
props
);
@@ -63,7 +63,7 @@ describe('getUniqueFilesFromException', () => {
{absPath: '~/myfile.js', filename: '~/myfile.js'}
);
const result = getUniqueFilesFromException(
- (event.entries as EntryException[])[0].data.values!,
+ (event.entries as EntryException[])[0]!.data.values!,
props
);
diff --git a/static/app/components/events/interfaces/crashContent/exception/utils.tsx b/static/app/components/events/interfaces/crashContent/exception/utils.tsx
index 7ed48748022b8b..c661baa6d43e32 100644
--- a/static/app/components/events/interfaces/crashContent/exception/utils.tsx
+++ b/static/app/components/events/interfaces/crashContent/exception/utils.tsx
@@ -16,7 +16,7 @@ export function isFrameFilenamePathlike(frame: Frame): boolean {
const parsedURL = safeURL(filename);
if (parsedURL) {
- filename = parsedURL.pathname.split('/').reverse()[0];
+ filename = parsedURL.pathname.split('/').reverse()[0]!;
}
return (
@@ -58,7 +58,7 @@ export const renderLinksInText = ({
const urls = exceptionText.match(urlRegex) || [];
const elements = parts.flatMap((part, index) => {
- const url = urls[index];
+ const url = urls[index]!;
const isUrlValid = isUrl(url);
let link: ReactElement | undefined;
diff --git a/static/app/components/events/interfaces/crashContent/stackTrace/content.spec.tsx b/static/app/components/events/interfaces/crashContent/stackTrace/content.spec.tsx
index b93c928f79b8a2..082149b0f3f85b 100644
--- a/static/app/components/events/interfaces/crashContent/stackTrace/content.spec.tsx
+++ b/static/app/components/events/interfaces/crashContent/stackTrace/content.spec.tsx
@@ -116,15 +116,15 @@ describe('StackTrace', function () {
const frameTitles = screen.getAllByTestId('title');
// collapse the expanded frame (by default)
- await userEvent.click(frameTitles[0]);
+ await userEvent.click(frameTitles[0]!);
// all frames are now collapsed
expect(screen.queryByTestId('toggle-button-expanded')).not.toBeInTheDocument();
expect(screen.getAllByTestId('toggle-button-collapsed')).toHaveLength(5);
// expand penultimate and last frame
- await userEvent.click(frameTitles[frameTitles.length - 2]);
- await userEvent.click(frameTitles[frameTitles.length - 1]);
+ await userEvent.click(frameTitles[frameTitles.length - 2]!);
+ await userEvent.click(frameTitles[frameTitles.length - 1]!);
// two frames are now collapsed
expect(screen.getAllByTestId('toggle-button-expanded')).toHaveLength(2);
@@ -154,8 +154,8 @@ describe('StackTrace', function () {
const collapsedToggleButtons = screen.getAllByTestId('toggle-button-collapsed');
// expand penultimate and last frame
- await userEvent.click(collapsedToggleButtons[collapsedToggleButtons.length - 2]);
- await userEvent.click(collapsedToggleButtons[collapsedToggleButtons.length - 1]);
+ await userEvent.click(collapsedToggleButtons[collapsedToggleButtons.length - 2]!);
+ await userEvent.click(collapsedToggleButtons[collapsedToggleButtons.length - 1]!);
// two frames are now collapsed
expect(screen.getAllByTestId('toggle-button-expanded')).toHaveLength(2);
@@ -188,7 +188,7 @@ describe('StackTrace', function () {
it('does not render non in app tags', function () {
const dataFrames = [...data.frames];
- dataFrames[0] = {...dataFrames[0], inApp: false};
+ dataFrames[0] = {...dataFrames[0]!, inApp: false};
const newData = {
...data,
@@ -204,7 +204,7 @@ describe('StackTrace', function () {
it('displays a toggle button when there is more than one non-inapp frame', function () {
const dataFrames = [...data.frames];
- dataFrames[0] = {...dataFrames[0], inApp: true};
+ dataFrames[0] = {...dataFrames[0]!, inApp: true};
const newData = {
...data,
@@ -221,11 +221,11 @@ describe('StackTrace', function () {
it('shows/hides frames when toggle button clicked', async function () {
const dataFrames = [...data.frames];
- dataFrames[0] = {...dataFrames[0], inApp: true};
- dataFrames[1] = {...dataFrames[1], function: 'non-in-app-frame'};
- dataFrames[2] = {...dataFrames[2], function: 'non-in-app-frame'};
- dataFrames[3] = {...dataFrames[3], function: 'non-in-app-frame'};
- dataFrames[4] = {...dataFrames[4], function: 'non-in-app-frame'};
+ dataFrames[0] = {...dataFrames[0]!, inApp: true};
+ dataFrames[1] = {...dataFrames[1]!, function: 'non-in-app-frame'};
+ dataFrames[2] = {...dataFrames[2]!, function: 'non-in-app-frame'};
+ dataFrames[3] = {...dataFrames[3]!, function: 'non-in-app-frame'};
+ dataFrames[4] = {...dataFrames[4]!, function: 'non-in-app-frame'};
const newData = {
...data,
@@ -244,9 +244,9 @@ describe('StackTrace', function () {
it('does not display a toggle button when there is only one non-inapp frame', function () {
const dataFrames = [...data.frames];
- dataFrames[0] = {...dataFrames[0], inApp: true};
- dataFrames[2] = {...dataFrames[2], inApp: true};
- dataFrames[4] = {...dataFrames[4], inApp: true};
+ dataFrames[0] = {...dataFrames[0]!, inApp: true};
+ dataFrames[2] = {...dataFrames[2]!, inApp: true};
+ dataFrames[4] = {...dataFrames[4]!, inApp: true};
const newData = {
...data,
@@ -269,7 +269,7 @@ describe('StackTrace', function () {
...data,
hasSystemFrames: true,
frames: [
- {...dataFrames[0], inApp: true},
+ {...dataFrames[0]!, inApp: true},
...dataFrames.splice(1, dataFrames.length),
],
};
@@ -304,7 +304,7 @@ describe('StackTrace', function () {
registers: {},
frames: [
...dataFrames.splice(0, dataFrames.length - 1),
- {...dataFrames[dataFrames.length - 1], inApp: true},
+ {...dataFrames[dataFrames.length - 1]!, inApp: true},
],
};
@@ -339,7 +339,7 @@ describe('StackTrace', function () {
hasSystemFrames: true,
frames: [
...dataFrames.slice(0, 1),
- {...dataFrames[1], inApp: true},
+ {...dataFrames[1]!, inApp: true},
...dataFrames.slice(2, dataFrames.length),
],
};
@@ -375,7 +375,7 @@ describe('StackTrace', function () {
...data,
hasSystemFrames: true,
frames: [
- {...dataFrames[0], inApp: true},
+ {...dataFrames[0]!, inApp: true},
...dataFrames.splice(1, dataFrames.length),
],
};
@@ -409,7 +409,7 @@ describe('StackTrace', function () {
...data,
hasSystemFrames: true,
frames: [
- {...dataFrames[0], inApp: true},
+ {...dataFrames[0]!, inApp: true},
...dataFrames.splice(1, dataFrames.length),
],
};
diff --git a/static/app/components/events/interfaces/crashContent/stackTrace/content.tsx b/static/app/components/events/interfaces/crashContent/stackTrace/content.tsx
index f3cc30ca29ba0e..620e727d8635b0 100644
--- a/static/app/components/events/interfaces/crashContent/stackTrace/content.tsx
+++ b/static/app/components/events/interfaces/crashContent/stackTrace/content.tsx
@@ -86,7 +86,7 @@ function Content({
function setInitialFrameMap(): {[frameIndex: number]: boolean} {
const indexMap: Record = {};
(data.frames ?? []).forEach((frame, frameIdx) => {
- const nextFrame = (data.frames ?? [])[frameIdx + 1];
+ const nextFrame = (data.frames ?? [])[frameIdx + 1]!;
const repeatedFrame = isRepeatedFrame(frame, nextFrame);
if (frameIsVisible(frame, nextFrame) && !repeatedFrame && !frame.inApp) {
indexMap[frameIdx] = false;
@@ -99,7 +99,7 @@ function Content({
let count = 0;
const countMap: Record = {};
(data.frames ?? []).forEach((frame, frameIdx) => {
- const nextFrame = (data.frames ?? [])[frameIdx + 1];
+ const nextFrame = (data.frames ?? [])[frameIdx + 1]!;
const repeatedFrame = isRepeatedFrame(frame, nextFrame);
if (frameIsVisible(frame, nextFrame) && !repeatedFrame && !frame.inApp) {
countMap[frameIdx] = count;
@@ -118,8 +118,8 @@ function Content({
return false;
}
- const lastFrame = frames[frames.length - 1];
- const penultimateFrame = frames[frames.length - 2];
+ const lastFrame = frames[frames.length - 1]!;
+ const penultimateFrame = frames[frames.length - 2]!;
return penultimateFrame.inApp && !lastFrame.inApp;
}
@@ -205,7 +205,7 @@ function Content({
let convertedFrames = frames
.map((frame, frameIndex) => {
const prevFrame = frames[frameIndex - 1];
- const nextFrame = frames[frameIndex + 1];
+ const nextFrame = frames[frameIndex + 1]!;
const repeatedFrame = isRepeatedFrame(frame, nextFrame);
if (repeatedFrame) {
@@ -284,7 +284,7 @@ function Content({
if (convertedFrames.length > 0 && registers) {
const lastFrame = convertedFrames.length - 1;
- convertedFrames[lastFrame] = cloneElement(convertedFrames[lastFrame], {
+ convertedFrames[lastFrame] = cloneElement(convertedFrames[lastFrame]!, {
registers,
});
}
diff --git a/static/app/components/events/interfaces/crashContent/stackTrace/nativeContent.spec.tsx b/static/app/components/events/interfaces/crashContent/stackTrace/nativeContent.spec.tsx
index 8b35b2cca3a923..ee730e66b6d972 100644
--- a/static/app/components/events/interfaces/crashContent/stackTrace/nativeContent.spec.tsx
+++ b/static/app/components/events/interfaces/crashContent/stackTrace/nativeContent.spec.tsx
@@ -67,7 +67,7 @@ describe('Native StackTrace', function () {
});
it('does not render non in app tags', function () {
const dataFrames = [...data.frames];
- dataFrames[0] = {...dataFrames[0], inApp: false};
+ dataFrames[0] = {...dataFrames[0]!, inApp: false};
const newData = {
...data,
@@ -83,7 +83,7 @@ describe('Native StackTrace', function () {
it('displays a toggle button when there is more than one non-inapp frame', function () {
const dataFrames = [...data.frames];
- dataFrames[0] = {...dataFrames[0], inApp: true};
+ dataFrames[0] = {...dataFrames[0]!, inApp: true};
const newData = {
...data,
@@ -100,11 +100,11 @@ describe('Native StackTrace', function () {
it('shows/hides frames when toggle button clicked', async function () {
const dataFrames = [...data.frames];
- dataFrames[0] = {...dataFrames[0], inApp: true};
- dataFrames[1] = {...dataFrames[1], function: 'non-in-app-frame'};
- dataFrames[2] = {...dataFrames[2], function: 'non-in-app-frame'};
- dataFrames[3] = {...dataFrames[3], function: 'non-in-app-frame'};
- dataFrames[4] = {...dataFrames[4], function: 'non-in-app-frame'};
+ dataFrames[0] = {...dataFrames[0]!, inApp: true};
+ dataFrames[1] = {...dataFrames[1]!, function: 'non-in-app-frame'};
+ dataFrames[2] = {...dataFrames[2]!, function: 'non-in-app-frame'};
+ dataFrames[3] = {...dataFrames[3]!, function: 'non-in-app-frame'};
+ dataFrames[4] = {...dataFrames[4]!, function: 'non-in-app-frame'};
const newData = {
...data,
@@ -123,9 +123,9 @@ describe('Native StackTrace', function () {
it('does not display a toggle button when there is only one non-inapp frame', function () {
const dataFrames = [...data.frames];
- dataFrames[0] = {...dataFrames[0], inApp: true};
- dataFrames[2] = {...dataFrames[2], inApp: true};
- dataFrames[4] = {...dataFrames[4], inApp: true};
+ dataFrames[0] = {...dataFrames[0]!, inApp: true};
+ dataFrames[2] = {...dataFrames[2]!, inApp: true};
+ dataFrames[4] = {...dataFrames[4]!, inApp: true};
const newData = {
...data,
@@ -165,10 +165,12 @@ describe('Native StackTrace', function () {
const frames = screen.getAllByTestId('stack-trace-frame');
- expect(within(frames[0]).getByTestId('symbolication-error-icon')).toBeInTheDocument();
expect(
- within(frames[1]).getByTestId('symbolication-warning-icon')
+ within(frames[0]!).getByTestId('symbolication-error-icon')
).toBeInTheDocument();
- expect(within(frames[2]).queryByTestId(/symbolication/)).not.toBeInTheDocument();
+ expect(
+ within(frames[1]!).getByTestId('symbolication-warning-icon')
+ ).toBeInTheDocument();
+ expect(within(frames[2]!).queryByTestId(/symbolication/)).not.toBeInTheDocument();
});
});
diff --git a/static/app/components/events/interfaces/crashContent/stackTrace/nativeContent.tsx b/static/app/components/events/interfaces/crashContent/stackTrace/nativeContent.tsx
index 53becef22c7c10..b6032c1ff7bdb4 100644
--- a/static/app/components/events/interfaces/crashContent/stackTrace/nativeContent.tsx
+++ b/static/app/components/events/interfaces/crashContent/stackTrace/nativeContent.tsx
@@ -74,7 +74,7 @@ export function NativeContent({
function setInitialFrameMap(): {[frameIndex: number]: boolean} {
const indexMap = {};
(data.frames ?? []).forEach((frame, frameIdx) => {
- const nextFrame = (data.frames ?? [])[frameIdx + 1];
+ const nextFrame = (data.frames ?? [])[frameIdx + 1]!;
const repeatedFrame = isRepeatedFrame(frame, nextFrame);
if (frameIsVisible(frame, nextFrame) && !repeatedFrame && !frame.inApp) {
indexMap[frameIdx] = false;
@@ -87,7 +87,7 @@ export function NativeContent({
let count = 0;
const countMap = {};
(data.frames ?? []).forEach((frame, frameIdx) => {
- const nextFrame = (data.frames ?? [])[frameIdx + 1];
+ const nextFrame = (data.frames ?? [])[frameIdx + 1]!;
const repeatedFrame = isRepeatedFrame(frame, nextFrame);
if (frameIsVisible(frame, nextFrame) && !repeatedFrame && !frame.inApp) {
countMap[frameIdx] = count;
@@ -180,7 +180,7 @@ export function NativeContent({
let convertedFrames = frames
.map((frame, frameIndex) => {
const prevFrame = frames[frameIndex - 1];
- const nextFrame = frames[frameIndex + 1];
+ const nextFrame = frames[frameIndex + 1]!;
const repeatedFrame = isRepeatedFrame(frame, nextFrame);
if (repeatedFrame) {
@@ -260,7 +260,7 @@ export function NativeContent({
if (convertedFrames.length > 0 && registers) {
const lastFrame = convertedFrames.length - 1;
- convertedFrames[lastFrame] = cloneElement(convertedFrames[lastFrame], {
+ convertedFrames[lastFrame] = cloneElement(convertedFrames[lastFrame]!, {
registers,
});
}
diff --git a/static/app/components/events/interfaces/crashContent/stackTrace/rawContent.spec.tsx b/static/app/components/events/interfaces/crashContent/stackTrace/rawContent.spec.tsx
index b53c5a6a694736..57ea9b323495d3 100644
--- a/static/app/components/events/interfaces/crashContent/stackTrace/rawContent.spec.tsx
+++ b/static/app/components/events/interfaces/crashContent/stackTrace/rawContent.spec.tsx
@@ -19,7 +19,7 @@ describe('RawStacktraceContent', () => {
lineNo: 582,
})
)
- ).toEqual(
+ ).toBe(
' at org.mortbay.thread.QueuedThreadPool$PoolThread.run(QueuedThreadPool.java:582)'
);
@@ -32,7 +32,7 @@ describe('RawStacktraceContent', () => {
filename: 'QueuedThreadPool.java',
})
)
- ).toEqual(
+ ).toBe(
' at org.mortbay.thread.QueuedThreadPool$PoolThread.run(QueuedThreadPool.java)'
);
@@ -45,7 +45,7 @@ describe('RawStacktraceContent', () => {
filename: 'QueuedThreadPool.java',
})
)
- ).toEqual(
+ ).toBe(
' at org.mortbay.thread.QueuedThreadPool$PoolThread.run(QueuedThreadPool.java)'
);
});
@@ -61,7 +61,7 @@ describe('RawStacktraceContent', () => {
module: undefined,
})
)
- ).toEqual('Baz: message');
+ ).toBe('Baz: message');
});
it('takes a module name', () => {
@@ -73,7 +73,7 @@ describe('RawStacktraceContent', () => {
value: 'message',
})
)
- ).toEqual('foo.bar.Baz: message');
+ ).toBe('foo.bar.Baz: message');
});
});
@@ -107,7 +107,7 @@ describe('RawStacktraceContent', () => {
};
it('renders java example', () => {
- expect(displayRawContent(data, 'java', exception)).toEqual(
+ expect(displayRawContent(data, 'java', exception)).toBe(
`example.application.Error: an error occurred
at example.application.doThing(application:2)
at example.application.main(application:1)`
@@ -115,7 +115,7 @@ describe('RawStacktraceContent', () => {
});
it('renders python example', () => {
- expect(displayRawContent(data, 'python', exception)).toEqual(
+ expect(displayRawContent(data, 'python', exception)).toBe(
`Error: an error occurred
File "application", line 1, in main
File "application", line 2, in doThing`
@@ -153,7 +153,7 @@ describe('RawStacktraceContent', () => {
}),
],
};
- expect(displayRawContent(dartData, 'dart', exception)).toEqual(
+ expect(displayRawContent(dartData, 'dart', exception)).toBe(
`Error: an error occurred
#0 main (package:sentry_flutter/main.dart:778:5)
#1
@@ -161,7 +161,7 @@ describe('RawStacktraceContent', () => {
);
});
- const inAppFrame = (fnName, line) =>
+ const inAppFrame = (fnName: string, line: number) =>
FrameFixture({
function: fnName,
module: 'example.application',
@@ -169,7 +169,7 @@ describe('RawStacktraceContent', () => {
filename: 'application',
platform: undefined,
});
- const systemFrame = (fnName, line) =>
+ const systemFrame = (fnName: string, line: number) =>
FrameFixture({
function: fnName,
module: 'example.application',
@@ -203,7 +203,7 @@ describe('RawStacktraceContent', () => {
it.each([onlyInAppFrames, onlySystemFrames, mixedFrames])(
'renders all frames when similarity flag is off, in-app or not',
stacktrace => {
- expect(displayRawContent(stacktrace, 'python', exception)).toEqual(
+ expect(displayRawContent(stacktrace, 'python', exception)).toBe(
`Error: an error occurred
File "application", line 1, in main
File "application", line 2, in doThing`
@@ -221,7 +221,7 @@ describe('RawStacktraceContent', () => {
exception,
similarityFeatureEnabled
)
- ).toEqual(
+ ).toBe(
`Error: an error occurred
File "application", line 1, in main
File "application", line 2, in doThing`
@@ -230,7 +230,7 @@ describe('RawStacktraceContent', () => {
);
it('renders only in-app frames when they exist and hasSimilarityEmbeddingsFeature is on', () => {
- expect(displayRawContent(mixedFrames, 'python', exception, true)).toEqual(
+ expect(displayRawContent(mixedFrames, 'python', exception, true)).toBe(
`Error: an error occurred
File "application", line 1, in main`
);
diff --git a/static/app/components/events/interfaces/crons/cronTimelineSection.tsx b/static/app/components/events/interfaces/crons/cronTimelineSection.tsx
index 53bd9bc1d4eab7..de2d12be6c5446 100644
--- a/static/app/components/events/interfaces/crons/cronTimelineSection.tsx
+++ b/static/app/components/events/interfaces/crons/cronTimelineSection.tsx
@@ -120,7 +120,7 @@ export function CronTimelineSection({event, organization, project}: Props) {
diff --git a/static/app/components/events/interfaces/csp/index.spec.tsx b/static/app/components/events/interfaces/csp/index.spec.tsx
index 4d7b1efc0d91df..048f83cd478626 100644
--- a/static/app/components/events/interfaces/csp/index.spec.tsx
+++ b/static/app/components/events/interfaces/csp/index.spec.tsx
@@ -21,7 +21,7 @@ describe('Csp report entry', function () {
},
},
});
- render( , {
+ render( , {
organization: {
relayPiiConfig: JSON.stringify(DataScrubbingRelayPiiConfigFixture()),
},
diff --git a/static/app/components/events/interfaces/debugMeta/index.tsx b/static/app/components/events/interfaces/debugMeta/index.tsx
index 9ee105aa9b2231..d48bec86431180 100644
--- a/static/app/components/events/interfaces/debugMeta/index.tsx
+++ b/static/app/components/events/interfaces/debugMeta/index.tsx
@@ -82,8 +82,8 @@ function applyImageFilters(
if (term.indexOf('0x') === 0) {
const needle = parseAddress(term);
if (needle > 0 && image.image_addr !== '0x0') {
- const [startAddress, endAddress] = getImageRange(image as any); // TODO(PRISCILA): remove any
- return needle >= startAddress && needle < endAddress;
+ const [startAddress, endAddress] = getImageRange(image);
+ return needle >= startAddress! && needle < endAddress!;
}
}
@@ -184,7 +184,7 @@ export function DebugMeta({data, projectSlug, groupId, event}: DebugMetaProps) {
];
const defaultFilterSelections = (
- 'options' in filterOptions[0] ? filterOptions[0].options : []
+ 'options' in filterOptions[0]! ? filterOptions[0].options : []
).filter(opt => opt.value !== ImageStatus.UNUSED);
setFilterState({
@@ -320,7 +320,7 @@ export function DebugMeta({data, projectSlug, groupId, event}: DebugMetaProps) {
>
diff --git a/static/app/components/events/interfaces/frame/context.tsx b/static/app/components/events/interfaces/frame/context.tsx
index 41c1d465915525..b536195bb65d65 100644
--- a/static/app/components/events/interfaces/frame/context.tsx
+++ b/static/app/components/events/interfaces/frame/context.tsx
@@ -141,7 +141,7 @@ function Context({
) : null;
}
- const startLineNo = hasContextSource ? frame.context[0][0] : 0;
+ const startLineNo = hasContextSource ? frame.context[0]![0] : 0;
const prismClassName = fileExtension ? `language-${fileExtension}` : '';
@@ -157,14 +157,14 @@ function Context({
{lines.map((line, i) => {
- const contextLine = contextLines[i];
- const isActive = activeLineNumber === contextLine[0];
+ const contextLine = contextLines[i]!;
+ const isActive = activeLineNumber === contextLine[0]!;
return (
diff --git a/static/app/components/events/interfaces/frame/contextLine.tsx b/static/app/components/events/interfaces/frame/contextLine.tsx
index 728f36bca89365..0173962dfd3ef4 100644
--- a/static/app/components/events/interfaces/frame/contextLine.tsx
+++ b/static/app/components/events/interfaces/frame/contextLine.tsx
@@ -30,7 +30,7 @@ function ContextLine({line, isActive, children, coverage = ''}: Props) {
let lineWs = '';
let lineCode = '';
if (typeof line[1] === 'string') {
- [, lineWs, lineCode] = line[1].match(/^(\s*)(.*?)$/m)!;
+ [, lineWs, lineCode] = line[1].match(/^(\s*)(.*?)$/m)! as [string, string, string];
}
return (
diff --git a/static/app/components/events/interfaces/frame/deprecatedLine.tsx b/static/app/components/events/interfaces/frame/deprecatedLine.tsx
index df32df23de9a06..a516723d0ee7e4 100644
--- a/static/app/components/events/interfaces/frame/deprecatedLine.tsx
+++ b/static/app/components/events/interfaces/frame/deprecatedLine.tsx
@@ -8,7 +8,7 @@ import {Button} from 'sentry/components/button';
import {Chevron} from 'sentry/components/chevron';
import ErrorBoundary from 'sentry/components/errorBoundary';
import {analyzeFrameForRootCause} from 'sentry/components/events/interfaces/analyzeFrames';
-import LeadHint from 'sentry/components/events/interfaces/frame/line/leadHint';
+import LeadHint from 'sentry/components/events/interfaces/frame/leadHint';
import {StacktraceLink} from 'sentry/components/events/interfaces/frame/stacktraceLink';
import type {FrameSourceMapDebuggerData} from 'sentry/components/events/interfaces/sourceMapsDebuggerModal';
import {SourceMapsDebuggerModal} from 'sentry/components/events/interfaces/sourceMapsDebuggerModal';
diff --git a/static/app/components/events/interfaces/frame/frameVariables.spec.tsx b/static/app/components/events/interfaces/frame/frameVariables.spec.tsx
index e173bf7c25e1df..4bfd5660fffce9 100644
--- a/static/app/components/events/interfaces/frame/frameVariables.spec.tsx
+++ b/static/app/components/events/interfaces/frame/frameVariables.spec.tsx
@@ -75,7 +75,7 @@ describe('Frame Variables', function () {
expect(screen.getAllByText(/redacted/)).toHaveLength(2);
- await userEvent.hover(screen.getAllByText(/redacted/)[0]);
+ await userEvent.hover(screen.getAllByText(/redacted/)[0]!);
expect(
await screen.findByText(
@@ -147,8 +147,8 @@ describe('Frame Variables', function () {
const nullValues = screen.getAllByTestId('value-null');
- expect(within(nullValues[0]).getByText('null')).toBeInTheDocument();
- expect(within(nullValues[1]).getByText('undefined')).toBeInTheDocument();
+ expect(within(nullValues[0]!).getByText('null')).toBeInTheDocument();
+ expect(within(nullValues[1]!).getByText('undefined')).toBeInTheDocument();
expect(
within(screen.getByTestId('value-boolean')).getByText('true')
).toBeInTheDocument();
diff --git a/static/app/components/events/interfaces/frame/line/leadHint.tsx b/static/app/components/events/interfaces/frame/leadHint.tsx
similarity index 100%
rename from static/app/components/events/interfaces/frame/line/leadHint.tsx
rename to static/app/components/events/interfaces/frame/leadHint.tsx
diff --git a/static/app/components/events/interfaces/frame/line/default.tsx b/static/app/components/events/interfaces/frame/line/default.tsx
deleted file mode 100644
index 1bd1b0246a273d..00000000000000
--- a/static/app/components/events/interfaces/frame/line/default.tsx
+++ /dev/null
@@ -1,121 +0,0 @@
-import styled from '@emotion/styled';
-
-import {IconRefresh} from 'sentry/icons/iconRefresh';
-import {tn} from 'sentry/locale';
-import {space} from 'sentry/styles/space';
-import type {Frame} from 'sentry/types/event';
-import {defined} from 'sentry/utils';
-
-import DefaultTitle from '../defaultTitle';
-
-import Expander from './expander';
-import LeadHint from './leadHint';
-import Wrapper from './wrapper';
-
-type Props = React.ComponentProps &
- React.ComponentProps & {
- frame: Frame;
- isUsedForGrouping: boolean;
- frameMeta?: Record;
- onClick?: () => void;
- onMouseDown?: React.MouseEventHandler;
- timesRepeated?: number;
- };
-
-function Default({
- frame,
- nextFrame,
- isHoverPreviewed,
- isExpanded,
- platform,
- timesRepeated,
- isUsedForGrouping,
- leadsToApp,
- onMouseDown,
- onClick,
- frameMeta,
- event,
- ...props
-}: Props) {
- function renderRepeats() {
- if (defined(timesRepeated) && timesRepeated > 0) {
- return (
-
-
-
- {timesRepeated}
-
-
- );
- }
-
- return null;
- }
-
- return (
-
-
-
-
-
-
- {renderRepeats()}
-
-
-
- );
-}
-
-export default Default;
-
-const VertCenterWrapper = styled('div')`
- display: flex;
- align-items: center;
-`;
-
-const Title = styled('div')`
- > * {
- vertical-align: middle;
- line-height: 1;
- }
-`;
-
-const RepeatedContent = styled(VertCenterWrapper)`
- justify-content: center;
- margin-right: ${space(1)};
-`;
-
-const RepeatedFrames = styled('div')`
- display: inline-block;
- border-radius: 50px;
- padding: 1px 3px;
- margin-left: ${space(1)};
- border-width: thin;
- border-style: solid;
- border-color: ${p => p.theme.pink200};
- color: ${p => p.theme.pink400};
- background-color: ${p => p.theme.backgroundSecondary};
- white-space: nowrap;
-`;
-
-const StyledIconRefresh = styled(IconRefresh)`
- margin-right: ${space(0.25)};
-`;
diff --git a/static/app/components/events/interfaces/frame/line/expander.tsx b/static/app/components/events/interfaces/frame/line/expander.tsx
deleted file mode 100644
index affec201d79349..00000000000000
--- a/static/app/components/events/interfaces/frame/line/expander.tsx
+++ /dev/null
@@ -1,44 +0,0 @@
-import styled from '@emotion/styled';
-
-import {Button} from 'sentry/components/button';
-import {SLOW_TOOLTIP_DELAY} from 'sentry/constants';
-import {IconChevron} from 'sentry/icons/iconChevron';
-import {t} from 'sentry/locale';
-import {space} from 'sentry/styles/space';
-import type {PlatformKey} from 'sentry/types/project';
-
-type Props = {
- isExpandable: boolean;
- onToggleContext: (evt: React.MouseEvent) => void;
- platform: PlatformKey;
- isExpanded?: boolean;
- isHoverPreviewed?: boolean;
-};
-
-function Expander({isExpandable, isHoverPreviewed, isExpanded, onToggleContext}: Props) {
- if (!isExpandable) {
- return null;
- }
-
- return (
-
-
-
- );
-}
-
-export default Expander;
-
-// the Button's label has the padding of 3px because the button size has to be 16x16 px.
-const StyledButton = styled(Button)`
- margin-left: ${space(1)};
- span:first-child {
- padding: 3px;
- }
-`;
diff --git a/static/app/components/events/interfaces/frame/line/index.tsx b/static/app/components/events/interfaces/frame/line/index.tsx
deleted file mode 100644
index ee051d3a10e4c1..00000000000000
--- a/static/app/components/events/interfaces/frame/line/index.tsx
+++ /dev/null
@@ -1,197 +0,0 @@
-import {useState} from 'react';
-import styled from '@emotion/styled';
-import classNames from 'classnames';
-
-import ListItem from 'sentry/components/list/listItem';
-import StrictClick from 'sentry/components/strictClick';
-import type {Event} from 'sentry/types/event';
-import type {
- SentryAppComponent,
- SentryAppSchemaStacktraceLink,
-} from 'sentry/types/integrations';
-import type {PlatformKey} from 'sentry/types/project';
-import withSentryAppComponents from 'sentry/utils/withSentryAppComponents';
-
-import Context from '../context';
-import {PackageStatusIcon} from '../packageStatus';
-import {FunctionNameToggleIcon} from '../symbol';
-import {AddressToggleIcon} from '../togglableAddress';
-import {
- getPlatform,
- hasAssembly,
- hasContextRegisters,
- hasContextSource,
- hasContextVars,
- isExpandable,
-} from '../utils';
-
-import Default from './default';
-import {Native} from './native';
-
-type Props = Omit<
- React.ComponentProps,
- 'onToggleContext' | 'isExpandable' | 'leadsToApp' | 'hasGroupingBadge'
-> &
- Omit<
- React.ComponentProps,
- 'onToggleContext' | 'isExpandable' | 'leadsToApp' | 'hasGroupingBadge'
- > & {
- components: SentryAppComponent[];
- event: Event;
- registers: Record;
- emptySourceNotation?: boolean;
- frameMeta?: Record;
- isOnlyFrame?: boolean;
- registersMeta?: Record;
- };
-
-function Line({
- frame,
- nextFrame,
- prevFrame,
- timesRepeated,
- includeSystemFrames,
- isFrameAfterLastNonApp,
- isUsedForGrouping,
- maxLengthOfRelativeAddress,
- image,
- registers,
- isOnlyFrame,
- event,
- components,
- frameMeta,
- registersMeta,
- emptySourceNotation = false,
- /**
- * Is the stack trace being previewed in a hovercard?
- */
- isHoverPreviewed = false,
- ...props
-}: Props) {
- // Prioritize the frame platform but fall back to the platform
- // of the stack trace / exception
- const platform = getPlatform(frame.platform, props.platform ?? 'other') as PlatformKey;
- const leadsToApp = !frame.inApp && (nextFrame?.inApp || !nextFrame);
-
- const expandable =
- !leadsToApp || includeSystemFrames
- ? isExpandable({
- frame,
- registers,
- platform,
- emptySourceNotation,
- isOnlyFrame,
- })
- : false;
-
- const [isExpanded, setIsExpanded] = useState(
- expandable ? props.isExpanded ?? false : false
- );
-
- function toggleContext(evt: React.MouseEvent) {
- evt.preventDefault();
- setIsExpanded(!isExpanded);
- }
-
- function renderLine() {
- switch (platform) {
- case 'objc':
- case 'cocoa':
- case 'native':
- case 'nintendo-switch':
- return (
-
- );
- default:
- return (
-
- );
- }
- }
-
- const className = classNames({
- frame: true,
- 'is-expandable': expandable,
- expanded: isExpanded,
- collapsed: !isExpanded,
- 'system-frame': !frame.inApp,
- 'leads-to-app': leadsToApp,
- });
-
- return (
-
-
- {renderLine()}
-
-
-
- );
-}
-
-export default withSentryAppComponents(Line, {componentType: 'stacktrace-link'});
-
-const StyleListItem = styled(ListItem)`
- overflow: hidden;
-
- :first-child {
- border-top: none;
- }
-
- ${PackageStatusIcon} {
- flex-shrink: 0;
- }
- :hover {
- ${PackageStatusIcon} {
- visibility: visible;
- }
- ${AddressToggleIcon} {
- visibility: visible;
- }
- ${FunctionNameToggleIcon} {
- visibility: visible;
- }
- }
-`;
diff --git a/static/app/components/events/interfaces/frame/line/native.tsx b/static/app/components/events/interfaces/frame/line/native.tsx
deleted file mode 100644
index 6443bf4be74ab4..00000000000000
--- a/static/app/components/events/interfaces/frame/line/native.tsx
+++ /dev/null
@@ -1,205 +0,0 @@
-import {useContext} from 'react';
-import styled from '@emotion/styled';
-
-import {TraceEventDataSectionContext} from 'sentry/components/events/traceEventDataSection';
-import {t} from 'sentry/locale';
-import DebugMetaStore from 'sentry/stores/debugMetaStore';
-import {space} from 'sentry/styles/space';
-import type {Frame} from 'sentry/types/event';
-import {SectionKey} from 'sentry/views/issueDetails/streamline/context';
-
-import type DebugImage from '../../debugMeta/debugImage';
-import {combineStatus} from '../../debugMeta/utils';
-import {SymbolicatorStatus} from '../../types';
-import PackageLink from '../packageLink';
-import PackageStatus from '../packageStatus';
-import Symbol from '../symbol';
-import TogglableAddress from '../togglableAddress';
-import {getPlatform} from '../utils';
-
-import Expander from './expander';
-import LeadHint from './leadHint';
-import Wrapper from './wrapper';
-
-type Props = React.ComponentProps &
- React.ComponentProps & {
- frame: Frame;
- isUsedForGrouping: boolean;
- image?: React.ComponentProps['image'];
- includeSystemFrames?: boolean;
- isFrameAfterLastNonApp?: boolean;
- maxLengthOfRelativeAddress?: number;
- onClick?: () => void;
- onMouseDown?: React.MouseEventHandler;
- prevFrame?: Frame;
- };
-
-export function Native({
- frame,
- isFrameAfterLastNonApp,
- isExpanded,
- isHoverPreviewed,
- image,
- includeSystemFrames,
- maxLengthOfRelativeAddress,
- platform,
- prevFrame,
- isUsedForGrouping,
- nextFrame,
- leadsToApp,
- onMouseDown,
- onClick,
- event,
- ...props
-}: Props) {
- const traceEventDataSectionContext = useContext(TraceEventDataSectionContext);
-
- if (!traceEventDataSectionContext) {
- return null;
- }
-
- const {instructionAddr, trust, addrMode, symbolicatorStatus} = frame ?? {};
-
- function packageStatus() {
- // this is the status of image that belongs to this frame
- if (!image) {
- return 'empty';
- }
-
- const combinedStatus = combineStatus(image.debug_status, image.unwind_status);
-
- switch (combinedStatus) {
- case 'unused':
- return 'empty';
- case 'found':
- return 'success';
- default:
- return 'error';
- }
- }
-
- function makeFilter(addr: string): string {
- if (!(!addrMode || addrMode === 'abs') && image) {
- return `${image.debug_id}!${addr}`;
- }
-
- return addr;
- }
-
- function scrollToImage(e: React.MouseEvent) {
- e.stopPropagation(); // to prevent collapsing if collapsible
-
- if (instructionAddr) {
- DebugMetaStore.updateFilter(makeFilter(instructionAddr));
- }
-
- document
- .getElementById(SectionKey.DEBUGMETA)
- ?.scrollIntoView({block: 'start', behavior: 'smooth'});
- }
-
- const shouldShowLinkToImage =
- !!symbolicatorStatus &&
- symbolicatorStatus !== SymbolicatorStatus.UNKNOWN_IMAGE &&
- !isHoverPreviewed;
-
- const isInlineFrame =
- prevFrame &&
- getPlatform(frame.platform, platform ?? 'other') ===
- (prevFrame.platform || platform) &&
- instructionAddr === prevFrame.instructionAddr;
-
- const isFoundByStackScanning = trust === 'scan' || trust === 'cfi-scan';
-
- return (
-
-
-
-
-
- {!isHoverPreviewed && (
-
- )}
-
-
- {instructionAddr && (
-
- )}
-
-
-
-
- );
-}
-
-const PackageInfo = styled('span')`
- display: grid;
- grid-template-columns: auto 1fr;
- order: 2;
- align-items: flex-start;
- @media (min-width: ${props => props.theme.breakpoints.small}) {
- order: 0;
- }
-`;
-
-const NativeLineContent = styled('div')<{isFrameAfterLastNonApp: boolean}>`
- display: grid;
- flex: 1;
- gap: ${space(0.5)};
- grid-template-columns: auto 1fr;
- align-items: center;
- justify-content: flex-start;
-
- @media (min-width: ${props => props.theme.breakpoints.small}) {
- grid-template-columns:
- ${p => (p.isFrameAfterLastNonApp ? '200px' : '150px')} minmax(117px, auto)
- 1fr;
- }
-
- @media (min-width: ${props => props.theme.breakpoints.large}) and (max-width: ${props =>
- props.theme.breakpoints.xlarge}) {
- grid-template-columns:
- ${p => (p.isFrameAfterLastNonApp ? '180px' : '140px')} minmax(117px, auto)
- 1fr;
- }
-`;
diff --git a/static/app/components/events/interfaces/frame/line/wrapper.tsx b/static/app/components/events/interfaces/frame/line/wrapper.tsx
deleted file mode 100644
index 1e9dd06fd4242d..00000000000000
--- a/static/app/components/events/interfaces/frame/line/wrapper.tsx
+++ /dev/null
@@ -1,8 +0,0 @@
-import styled from '@emotion/styled';
-
-const Wrapper = styled('div')`
- display: grid;
- grid-template-columns: 1fr auto;
-`;
-
-export default Wrapper;
diff --git a/static/app/components/events/interfaces/frame/stacktraceLink.tsx b/static/app/components/events/interfaces/frame/stacktraceLink.tsx
index de9c0abf74cfc3..89ec41f2e1795a 100644
--- a/static/app/components/events/interfaces/frame/stacktraceLink.tsx
+++ b/static/app/components/events/interfaces/frame/stacktraceLink.tsx
@@ -200,7 +200,7 @@ export function StacktraceLink({frame, event, line}: StacktraceLinkProps) {
const url = new URL(sourceLink);
const hostname = url.hostname;
const parts = hostname.split('.');
- const domain = parts.length > 1 ? parts[1] : '';
+ const domain = parts.length > 1 ? parts[1]! : '';
trackAnalytics(
'integrations.non_inapp_stacktrace_link_clicked',
{
@@ -340,7 +340,7 @@ export function StacktraceLink({frame, event, line}: StacktraceLinkProps) {
priority="link"
icon={
sourceCodeProviders.length === 1
- ? getIntegrationIcon(sourceCodeProviders[0].provider.key, 'sm')
+ ? getIntegrationIcon(sourceCodeProviders[0]!.provider.key, 'sm')
: undefined
}
onClick={() => {
@@ -349,7 +349,7 @@ export function StacktraceLink({frame, event, line}: StacktraceLinkProps) {
{
view: 'stacktrace_issue_details',
platform: event.platform,
- provider: sourceCodeProviders[0]?.provider.key,
+ provider: sourceCodeProviders[0]?.provider.key!,
setup_type: 'automatic',
organization,
...getAnalyticsDataForEvent(event),
diff --git a/static/app/components/events/interfaces/frame/stacktraceLinkModal.tsx b/static/app/components/events/interfaces/frame/stacktraceLinkModal.tsx
index c4ef2bc695c8ba..b1cf126d46a630 100644
--- a/static/app/components/events/interfaces/frame/stacktraceLinkModal.tsx
+++ b/static/app/components/events/interfaces/frame/stacktraceLinkModal.tsx
@@ -87,10 +87,10 @@ function StacktraceLinkModal({
// If they have more than one, they'll have to navigate themselves
const hasOneSourceCodeIntegration = sourceCodeProviders.length === 1;
const sourceUrl = hasOneSourceCodeIntegration
- ? `https://${sourceCodeProviders[0].domainName}`
+ ? `https://${sourceCodeProviders[0]!.domainName}`
: undefined;
const providerName = hasOneSourceCodeIntegration
- ? sourceCodeProviders[0].name
+ ? sourceCodeProviders[0]!.name
: t('source code');
const onManualSetup = () => {
@@ -99,7 +99,7 @@ function StacktraceLinkModal({
setup_type: 'manual',
provider:
sourceCodeProviders.length === 1
- ? sourceCodeProviders[0].provider.name
+ ? sourceCodeProviders[0]!.provider.name
: 'unknown',
organization,
});
@@ -171,7 +171,7 @@ function StacktraceLinkModal({
onClick={onManualSetup}
to={
hasOneSourceCodeIntegration
- ? `/settings/${organization.slug}/integrations/${sourceCodeProviders[0].provider.key}/${sourceCodeProviders[0].id}/`
+ ? `/settings/${organization.slug}/integrations/${sourceCodeProviders[0]!.provider.key}/${sourceCodeProviders[0]!.id}/`
: `/settings/${organization.slug}/integrations/`
}
/>
@@ -200,7 +200,7 @@ function StacktraceLinkModal({
? tct('Go to [link]', {
link: (
- {sourceCodeProviders[0].provider.name}
+ {sourceCodeProviders[0]!.provider.name}
),
})
diff --git a/static/app/components/events/interfaces/frame/usePrismTokensSourceContext.tsx b/static/app/components/events/interfaces/frame/usePrismTokensSourceContext.tsx
index 7fc3e56077d458..f82ece58927308 100644
--- a/static/app/components/events/interfaces/frame/usePrismTokensSourceContext.tsx
+++ b/static/app/components/events/interfaces/frame/usePrismTokensSourceContext.tsx
@@ -253,7 +253,7 @@ export const usePrismTokensSourceContext = ({
}) => {
const organization = useOrganization({allowNull: true});
- const fullLanguage = getPrismLanguage(fileExtension);
+ const fullLanguage = getPrismLanguage(fileExtension)!;
const {preCode, executedCode, postCode} = convertContextLines(contextLines, lineNo);
const code = preCode + executedCode + postCode;
diff --git a/static/app/components/events/interfaces/keyValueList/index.spec.tsx b/static/app/components/events/interfaces/keyValueList/index.spec.tsx
index 22f4670cac552e..f0e24cbc5b440d 100644
--- a/static/app/components/events/interfaces/keyValueList/index.spec.tsx
+++ b/static/app/components/events/interfaces/keyValueList/index.spec.tsx
@@ -14,11 +14,11 @@ describe('KeyValueList', function () {
const rows = screen.getAllByRole('row');
expect(rows).toHaveLength(2);
- const firstColumn = within(rows[0]).getAllByRole('cell');
+ const firstColumn = within(rows[0]!).getAllByRole('cell');
expect(firstColumn[0]).toHaveTextContent('a');
expect(firstColumn[1]).toHaveTextContent('x');
- const secondColumn = within(rows[1]).getAllByRole('cell');
+ const secondColumn = within(rows[1]!).getAllByRole('cell');
expect(secondColumn[0]).toHaveTextContent('b');
expect(secondColumn[1]).toHaveTextContent('y');
});
@@ -33,11 +33,11 @@ describe('KeyValueList', function () {
const rows = screen.getAllByRole('row');
- const firstColumn = within(rows[0]).getAllByRole('cell');
+ const firstColumn = within(rows[0]!).getAllByRole('cell');
expect(firstColumn[0]).toHaveTextContent('a');
expect(firstColumn[1]).toHaveTextContent('x');
- const secondColumn = within(rows[1]).getAllByRole('cell');
+ const secondColumn = within(rows[1]!).getAllByRole('cell');
expect(secondColumn[0]).toHaveTextContent('b');
expect(secondColumn[1]).toHaveTextContent('y');
});
@@ -52,11 +52,11 @@ describe('KeyValueList', function () {
const rows = screen.getAllByRole('row');
- const firstColumn = within(rows[0]).getAllByRole('cell');
+ const firstColumn = within(rows[0]!).getAllByRole('cell');
expect(firstColumn[0]).toHaveTextContent('a');
expect(firstColumn[1]).toHaveTextContent(''); // empty string
- const secondColumn = within(rows[1]).getAllByRole('cell');
+ const secondColumn = within(rows[1]!).getAllByRole('cell');
expect(secondColumn[0]).toHaveTextContent('b');
expect(secondColumn[1]).toHaveTextContent('y');
});
@@ -72,10 +72,10 @@ describe('KeyValueList', function () {
const rows = screen.getAllByRole('row');
// Ignore values, more interested in if keys rendered + are sorted
- const firstColumn = within(rows[0]).getAllByRole('cell');
+ const firstColumn = within(rows[0]!).getAllByRole('cell');
expect(firstColumn[0]).toHaveTextContent('a');
- const secondColumn = within(rows[1]).getAllByRole('cell');
+ const secondColumn = within(rows[1]!).getAllByRole('cell');
expect(secondColumn[0]).toHaveTextContent('b');
});
diff --git a/static/app/components/events/interfaces/nativeFrame.tsx b/static/app/components/events/interfaces/nativeFrame.tsx
index 0f598426f97037..5ad6899a7b5044 100644
--- a/static/app/components/events/interfaces/nativeFrame.tsx
+++ b/static/app/components/events/interfaces/nativeFrame.tsx
@@ -39,7 +39,7 @@ import type {PlatformKey} from 'sentry/types/project';
import {defined} from 'sentry/utils';
import {useSyncedLocalStorageState} from 'sentry/utils/useSyncedLocalStorageState';
import withSentryAppComponents from 'sentry/utils/withSentryAppComponents';
-import {SectionKey, useEventDetails} from 'sentry/views/issueDetails/streamline/context';
+import {SectionKey, useIssueDetails} from 'sentry/views/issueDetails/streamline/context';
import {getFoldSectionKey} from 'sentry/views/issueDetails/streamline/foldSection';
import {useHasStreamlinedUI} from 'sentry/views/issueDetails/utils';
@@ -80,7 +80,6 @@ function NativeFrame({
frame,
nextFrame,
prevFrame,
- includeSystemFrames,
isUsedForGrouping,
maxLengthOfRelativeAddress,
image,
@@ -104,7 +103,7 @@ function NativeFrame({
}: Props) {
const traceEventDataSectionContext = useContext(TraceEventDataSectionContext);
- const {sectionData} = useEventDetails();
+ const {sectionData} = useIssueDetails();
const debugSectionConfig = sectionData[SectionKey.DEBUGMETA];
const [_isCollapsed, setIsCollapsed] = useSyncedLocalStorageState(
getFoldSectionKey(SectionKey.DEBUGMETA),
@@ -134,16 +133,13 @@ function NativeFrame({
(hasStreamlinedUI ? !!debugSectionConfig : true);
const leadsToApp = !frame.inApp && (nextFrame?.inApp || !nextFrame);
- const expandable =
- !leadsToApp || includeSystemFrames
- ? isExpandable({
- frame,
- registers,
- platform,
- emptySourceNotation,
- isOnlyFrame,
- })
- : false;
+ const expandable = isExpandable({
+ frame,
+ registers,
+ platform,
+ emptySourceNotation,
+ isOnlyFrame,
+ });
const inlineFrame =
prevFrame &&
diff --git a/static/app/components/events/interfaces/performance/durationChart.tsx b/static/app/components/events/interfaces/performance/durationChart.tsx
deleted file mode 100644
index 265025c54cc293..00000000000000
--- a/static/app/components/events/interfaces/performance/durationChart.tsx
+++ /dev/null
@@ -1,174 +0,0 @@
-import {useRef} from 'react';
-import {useTheme} from '@emotion/react';
-import type {YAXisComponentOption} from 'echarts';
-import type {Location} from 'history';
-import moment from 'moment-timezone';
-
-import EventsRequest from 'sentry/components/charts/eventsRequest';
-import type {LineChartSeries} from 'sentry/components/charts/lineChart';
-import {LineChart} from 'sentry/components/charts/lineChart';
-import LoadingPanel from 'sentry/components/charts/loadingPanel';
-import {getInterval} from 'sentry/components/charts/utils';
-import {IconWarning} from 'sentry/icons';
-import {t} from 'sentry/locale';
-import type {Series} from 'sentry/types/echarts';
-import type {EventError} from 'sentry/types/event';
-import type {Group} from 'sentry/types/group';
-import type {Organization} from 'sentry/types/organization';
-import {
- findRangeOfMultiSeries,
- getDurationUnit,
- tooltipFormatter,
-} from 'sentry/utils/discover/charts';
-import {aggregateOutputType} from 'sentry/utils/discover/fields';
-import useApi from 'sentry/utils/useApi';
-import {ErrorPanel} from 'sentry/views/performance/styles';
-
-interface Props {
- event: EventError;
- issue: Group;
- location: Location;
- organization: Organization;
-}
-
-export function DurationChart({issue, event, organization}: Props) {
- const transactionNameTag = event.tags.find(tag => tag.key === 'transaction');
- const transactionName = transactionNameTag ? transactionNameTag.value : '';
-
- const spanHashTag = event.tags.find(
- tag => tag.key === 'performance_issue.extra_spans'
- ) || {key: '', value: ''};
-
- const allEventsQuery = `event.type:transaction transaction:${transactionName}`;
- const affectedEventsQuery = `${allEventsQuery} ${spanHashTag.key}:${spanHashTag.value}`;
-
- const allEventsApi = useApi();
- const affectedEventsApi = useApi();
-
- const nowRef = useRef(new Date());
-
- // TODO (udameli): Project ID is hardcoded to sentry for the experiment
- // because performance issues from sentry project are sent to a different project
- const PROJECT_ID = 1;
-
- const issueStart = issue.firstSeen;
- const timeFromFirstSeen = moment(nowRef.current).diff(issueStart);
- const start = moment(issueStart).subtract(timeFromFirstSeen).format();
- const interval = getInterval({start, end: nowRef.current, utc: true}, 'low');
-
- return (
-
- {({
- timeseriesData: allEvents,
- loading: allEventsLoading,
- errored: allEventsErrored,
- }) => (
-
- {({
- timeseriesData: data,
- loading: affectedEventsLoading,
- errored: affectedEventsErrored,
- }) => (
-
- )}
-
- )}
-
- );
-}
-
-interface ContentProps {
- affectedEvents: LineChartSeries[] | undefined;
- allEvents: Series[] | undefined;
- errored: boolean;
- loading: boolean;
-}
-
-function Content({affectedEvents, allEvents, errored, loading}: ContentProps) {
- const theme = useTheme();
-
- if (!affectedEvents || affectedEvents.length === 0) {
- return null;
- }
-
- if (loading) {
- return ;
- }
-
- const durationUnit = getDurationUnit(affectedEvents);
- const range = findRangeOfMultiSeries([...affectedEvents, ...(allEvents || [])]);
- let min = 0;
- if (range) {
- min = range.min - (range.max - range.min) * 0.2;
- }
-
- const yAxis: YAXisComponentOption = {
- show: false,
- minInterval: durationUnit,
- min,
- axisLabel: {
- color: theme.chartLabel,
- formatter() {
- return '';
- },
- },
- };
-
- return errored ? (
-
-
-
- ) : (
- {
- return tooltipFormatter(
- value,
- aggregateOutputType(affectedEvents.at(0)?.seriesName ?? seriesName)
- );
- },
- }}
- />
- );
-}
diff --git a/static/app/components/events/interfaces/performance/eventTraceView.spec.tsx b/static/app/components/events/interfaces/performance/eventTraceView.spec.tsx
index a01c8f1e53f503..33fb521eb0b941 100644
--- a/static/app/components/events/interfaces/performance/eventTraceView.spec.tsx
+++ b/static/app/components/events/interfaces/performance/eventTraceView.spec.tsx
@@ -22,7 +22,7 @@ window.ResizeObserver = ResizeObserver;
describe('EventTraceView', () => {
const traceId = 'this-is-a-good-trace-id';
const {organization, project} = initializeData({
- features: ['profiling', 'issue-details-always-show-trace'],
+ features: ['profiling'],
});
const group = GroupFixture();
const event = EventFixture({
@@ -46,6 +46,11 @@ describe('EventTraceView', () => {
});
it('renders a trace', async () => {
+ MockApiClient.addMockResponse({
+ url: '/subscriptions/org-slug/',
+ method: 'GET',
+ body: {},
+ });
MockApiClient.addMockResponse({
method: 'GET',
url: `/organizations/${organization.slug}/events-trace-meta/${traceId}/`,
@@ -103,6 +108,32 @@ describe('EventTraceView', () => {
});
it('still renders trace link for performance issues', async () => {
+ const oneOtherIssueEvent: TraceEventResponse = {
+ data: [
+ {
+ // In issuePlatform, the message contains the title and the transaction
+ message: '/api/slow/ Slow DB Query SELECT "sentry_monitorcheckin"."monitor_id"',
+ timestamp: '2024-01-24T09:09:03+00:00',
+ 'issue.id': 1000,
+ project: project.slug,
+ 'project.name': project.name,
+ title: 'Slow DB Query',
+ id: 'abc',
+ transaction: 'n/a',
+ culprit: '/api/slow/',
+ 'event.type': '',
+ },
+ ],
+ meta: {fields: {}, units: {}},
+ };
+ MockApiClient.addMockResponse({
+ url: `/organizations/${organization.slug}/events/`,
+ body: oneOtherIssueEvent,
+ });
+ MockApiClient.addMockResponse({
+ url: `/organizations/${organization.slug}/projects/`,
+ body: [],
+ });
const perfGroup = GroupFixture({issueCategory: IssueCategory.PERFORMANCE});
const perfEvent = EventFixture({
occurrence: {
@@ -129,9 +160,17 @@ describe('EventTraceView', () => {
expect(
await screen.findByRole('link', {name: 'View Full Trace'})
).toBeInTheDocument();
+ expect(
+ screen.getByText('One other issue appears in the same trace.')
+ ).toBeInTheDocument();
});
it('does not render the trace preview if it has no transactions', async () => {
+ MockApiClient.addMockResponse({
+ url: '/subscriptions/org-slug/',
+ method: 'GET',
+ body: {},
+ });
MockApiClient.addMockResponse({
method: 'GET',
url: `/organizations/${organization.slug}/events-trace-meta/${traceId}/`,
@@ -154,8 +193,5 @@ describe('EventTraceView', () => {
render( );
expect(await screen.findByText('Trace')).toBeInTheDocument();
- expect(
- await screen.findByRole('link', {name: 'View Full Trace'})
- ).toBeInTheDocument();
});
});
diff --git a/static/app/components/events/interfaces/performance/eventTraceView.tsx b/static/app/components/events/interfaces/performance/eventTraceView.tsx
index f47817c661bb32..ce2e3e7700d94a 100644
--- a/static/app/components/events/interfaces/performance/eventTraceView.tsx
+++ b/static/app/components/events/interfaces/performance/eventTraceView.tsx
@@ -1,19 +1,23 @@
-import {useMemo} from 'react';
+import {Fragment, useMemo} from 'react';
import styled from '@emotion/styled';
+import type {LocationDescriptor} from 'history';
import {LinkButton} from 'sentry/components/button';
+import Link from 'sentry/components/links/link';
import {generateTraceTarget} from 'sentry/components/quickTrace/utils';
import {IconOpen} from 'sentry/icons';
import {t} from 'sentry/locale';
-import {space} from 'sentry/styles/space';
import type {Event} from 'sentry/types/event';
import {type Group, IssueCategory} from 'sentry/types/group';
import type {Organization} from 'sentry/types/organization';
+import {defined} from 'sentry/utils';
+import useRouteAnalyticsParams from 'sentry/utils/routeAnalytics/useRouteAnalyticsParams';
import {useLocation} from 'sentry/utils/useLocation';
import useOrganization from 'sentry/utils/useOrganization';
import {SectionKey} from 'sentry/views/issueDetails/streamline/context';
import {InterimSection} from 'sentry/views/issueDetails/streamline/interimSection';
-import {TraceDataSection} from 'sentry/views/issueDetails/traceDataSection';
+import {TraceIssueEvent} from 'sentry/views/issueDetails/traceTimeline/traceIssue';
+import {useTraceTimelineEvents} from 'sentry/views/issueDetails/traceTimeline/useTraceTimelineEvents';
import {IssuesTraceWaterfall} from 'sentry/views/performance/newTraceDetails/issuesTraceWaterfall';
import {useIssuesTraceTree} from 'sentry/views/performance/newTraceDetails/traceApi/useIssuesTraceTree';
import {useTrace} from 'sentry/views/performance/newTraceDetails/traceApi/useTrace';
@@ -35,6 +39,7 @@ const DEFAULT_ISSUE_DETAILS_TRACE_VIEW_PREFERENCES: TracePreferencesState = {
'drawer left': 0.33,
'drawer right': 0.33,
'drawer bottom': 0.4,
+ 'trace context height': 150,
},
layoutOptions: [],
},
@@ -109,6 +114,21 @@ function EventTraceViewInner({event, organization, traceId}: EventTraceViewInner
);
}
+function getHrefFromTraceTarget(traceTarget: LocationDescriptor) {
+ if (typeof traceTarget === 'string') {
+ return traceTarget;
+ }
+
+ const searchParams = new URLSearchParams();
+ for (const key in traceTarget.query) {
+ if (defined(traceTarget.query[key])) {
+ searchParams.append(key, traceTarget.query[key]);
+ }
+ }
+
+ return `${traceTarget.pathname}?${searchParams.toString()}`;
+}
+
function IssuesTraceOverlay({event}: {event: Event}) {
const location = useLocation();
const organization = useOrganization();
@@ -131,13 +151,51 @@ function IssuesTraceOverlay({event}: {event: Event}) {
}
- aria-label={t('Open Trace')}
- to={traceTarget}
- />
+ href={getHrefFromTraceTarget(traceTarget)}
+ external
+ analyticsEventName="Issue Details: View Full Trace"
+ analyticsEventKey="issue_details.view_full_trace"
+ >
+ {t('View Full Trace')}
+
);
}
+function OneOtherIssueEvent({event}: {event: Event}) {
+ const location = useLocation();
+ const organization = useOrganization();
+ const {isLoading, oneOtherIssueEvent} = useTraceTimelineEvents({event});
+ useRouteAnalyticsParams(oneOtherIssueEvent ? {has_related_trace_issue: true} : {});
+
+ if (isLoading || !oneOtherIssueEvent) {
+ return null;
+ }
+
+ const traceTarget = generateTraceTarget(
+ event,
+ organization,
+ {
+ ...location,
+ query: {
+ ...location.query,
+ groupId: event.groupID,
+ },
+ },
+ TraceViewSources.ISSUE_DETAILS
+ );
+
+ return (
+
+
+ {t('One other issue appears in the same trace. ')}
+ {t('View Full Trace')}
+
+
+
+ );
+}
+
const IssuesTraceContainer = styled('div')`
position: relative;
`;
@@ -148,9 +206,19 @@ const IssuesTraceOverlayContainer = styled('div')`
z-index: 10;
a {
+ display: none;
position: absolute;
- top: ${space(1)};
- right: ${space(1)};
+ top: 50%;
+ left: 50%;
+ transform: translate(-50%, -50%);
+ }
+
+ &:hover {
+ background-color: rgba(128, 128, 128, 0.4);
+
+ a {
+ display: block;
+ }
}
`;
@@ -165,18 +233,14 @@ export function EventTraceView({group, event, organization}: EventTraceViewProps
}
const hasProfilingFeature = organization.features.includes('profiling');
- const hasIssueDetailsTrace = organization.features.includes(
- 'issue-details-always-show-trace'
- );
const hasTracePreviewFeature =
hasProfilingFeature &&
- hasIssueDetailsTrace &&
// Only display this for error or default events since performance events are handled elsewhere
group.issueCategory !== IssueCategory.PERFORMANCE;
return (
-
+
{hasTracePreviewFeature && (
;
- eventPlatform: Event['platform'];
- groupId: string;
-};
-
-// This section provides users with resources on how to resolve an issue
-export function Resources({configResources, eventPlatform, groupId}: Props) {
- const organization = useOrganization();
- const links = [
- ...configResources.links,
- ...(configResources.linksByPlatform[eventPlatform ?? ''] ?? []),
- ];
-
- return (
-
- {configResources.description}
-
- {links.map(({link, text}) => (
- // Please note that the UI will not fit a very long text and if we need to support that we will need to update the UI
-
- trackAnalytics('issue_details.resources_link_clicked', {
- organization,
- resource: text,
- group_id: groupId,
- })
- }
- key={link}
- href={link}
- openInNewTab
- >
- {text}
-
- ))}
-
-
- );
-}
-
-const LinkSection = styled('div')`
- display: flex;
- flex-direction: column;
- gap: ${space(1)};
-
- margin-top: ${space(2)};
-
- a {
- display: flex;
- align-items: center;
- width: max-content;
- }
-
- svg {
- margin-right: ${space(1)};
- }
-`;
diff --git a/static/app/components/events/interfaces/performance/spanCountChart.tsx b/static/app/components/events/interfaces/performance/spanCountChart.tsx
deleted file mode 100644
index 123a62a8991ac3..00000000000000
--- a/static/app/components/events/interfaces/performance/spanCountChart.tsx
+++ /dev/null
@@ -1,168 +0,0 @@
-import {useRef} from 'react';
-import type {Location} from 'history';
-
-import {BarChart} from 'sentry/components/charts/barChart';
-import ErrorPanel from 'sentry/components/charts/errorPanel';
-import LoadingPanel from 'sentry/components/charts/loadingPanel';
-import {IconWarning} from 'sentry/icons';
-import {t} from 'sentry/locale';
-import type {EventError, EventTag} from 'sentry/types/event';
-import type {Group} from 'sentry/types/group';
-import type {Organization} from 'sentry/types/organization';
-import toArray from 'sentry/utils/array/toArray';
-import EventView from 'sentry/utils/discover/eventView';
-import SpanCountHistogramQuery from 'sentry/utils/performance/histogram/spanCountHistogramQuery';
-import type {HistogramData} from 'sentry/utils/performance/histogram/types';
-import {formatHistogramData} from 'sentry/utils/performance/histogram/utils';
-import theme from 'sentry/utils/theme';
-
-interface Props {
- event: EventError;
- issue: Group;
- location: Location;
- organization: Organization;
-}
-
-export function SpanCountChart({issue, event, location, organization}: Props) {
- const transactionNameTag = event.tags.find(tag => tag.key === 'transaction');
- const transactionName = transactionNameTag ? transactionNameTag.value : '';
-
- const spanHashTag = event.tags.find(
- (tag: EventTag) => tag.key === 'performance_issue.extra_spans'
- ) || {key: '', value: ''};
-
- const allEventsQuery = `event.type:transaction transaction:${transactionName}`;
- const affectedEventsQuery = `${allEventsQuery} ${spanHashTag.key}:${spanHashTag.value}`;
-
- const nowRef = useRef(new Date());
-
- const start = issue.firstSeen;
- const end = nowRef.current?.toString();
- const environment = [];
- const project = [1];
- const spanOp = event.contexts.performance_issue.op;
-
- function renderChart(data: HistogramData, _data: HistogramData) {
- const xAxis = {
- type: 'category' as const,
- truncate: true,
- axisTick: {
- interval: 5,
- alignWithLabel: true,
- },
- };
-
- const colors = [theme.charts.previousPeriod, ...theme.charts.getColorPalette(4)];
- const tooltip = {
- formatter(series) {
- const seriesData = toArray(series);
- let contents: string[] = [];
-
- contents = seriesData.map(item => {
- const label = item.seriesName;
- const value = item.value[1].toLocaleString();
- return [
- '',
- ].join('');
- });
- const seriesLabel = seriesData[0].value[0];
- contents.push(``);
-
- contents.push('
');
- return contents.join('');
- },
- };
-
- const series = {
- seriesName: t('Affected Transaction Count'),
- data: formatHistogramData(data, {type: 'number'}),
- };
-
- const _series = {
- seriesName: t('All Transaction Count'),
- data: formatHistogramData(_data, {type: 'number'}),
- };
-
- return (
- ''}}}
- series={[_series, series]}
- tooltip={tooltip}
- colors={colors}
- height={200}
- />
- );
- }
-
- const allEvents = EventView.fromNewQueryWithLocation(
- {
- id: undefined,
- version: 2,
- name: '',
- fields: ['transaction.duration'],
- projects: project,
- query: allEventsQuery,
- environment,
- start,
- end,
- },
- location
- );
-
- const affectedEventsEventView = EventView.fromNewQueryWithLocation(
- {
- id: undefined,
- version: 2,
- name: '',
- fields: ['transaction.duration'],
- projects: project,
- query: affectedEventsQuery,
- environment,
- start,
- end,
- },
- location
- );
-
- return (
-
- {({histogram: _histogram, isLoading: _isLoading, error: _error}) => (
-
- {({histogram, isLoading, error}) => {
- if (isLoading || _isLoading) {
- return ;
- }
-
- if (error || _error) {
- return (
-
-
-
- );
- }
-
- return renderChart(histogram || [], _histogram || []);
- }}
-
- )}
-
- );
-}
diff --git a/static/app/components/events/interfaces/performance/spanEvidenceKeyValueList.spec.tsx b/static/app/components/events/interfaces/performance/spanEvidenceKeyValueList.spec.tsx
index 1ce55100e3d1f4..d17fb50539edbe 100644
--- a/static/app/components/events/interfaces/performance/spanEvidenceKeyValueList.spec.tsx
+++ b/static/app/components/events/interfaces/performance/spanEvidenceKeyValueList.spec.tsx
@@ -313,7 +313,7 @@ describe('SpanEvidenceKeyValueList', () => {
screen.getByTestId('span-evidence-key-value-list.starting-span')
).toHaveTextContent('SELECT * FROM USERS LIMIT 100');
- expect(screen.queryAllByRole('cell', {name: 'Parallelizable Spans'}).length).toBe(
+ expect(screen.queryAllByRole('cell', {name: 'Parallelizable Spans'})).toHaveLength(
1
);
const parallelizableSpanKeyValue = screen.getByTestId(
@@ -457,7 +457,7 @@ describe('SpanEvidenceKeyValueList', () => {
screen.getByTestId(/span-evidence-key-value-list.repeating-spans/)
).toHaveTextContent('/book/[Parameters]');
- expect(screen.queryByRole('cell', {name: 'Parameters'})).toBeInTheDocument();
+ expect(screen.getByRole('cell', {name: 'Parameters'})).toBeInTheDocument();
const parametersKeyValue = screen.getByTestId(
'span-evidence-key-value-list.parameters'
@@ -476,7 +476,7 @@ describe('SpanEvidenceKeyValueList', () => {
url: 'http://service.io?id=2543',
},
})?.toString()
- ).toEqual('http://service.io/?id=2543');
+ ).toBe('http://service.io/?id=2543');
});
it('Pulls out a relative URL if a base is provided', () => {
@@ -490,7 +490,7 @@ describe('SpanEvidenceKeyValueList', () => {
},
'http://service.io'
)?.toString()
- ).toEqual('http://service.io/item');
+ ).toBe('http://service.io/item');
});
it('Fetches the query string from the span data if available', () => {
@@ -503,7 +503,7 @@ describe('SpanEvidenceKeyValueList', () => {
'http.query': 'id=153',
},
})?.toString()
- ).toEqual('http://service.io/item?id=153');
+ ).toBe('http://service.io/item?id=153');
});
it('Falls back to span description if URL is faulty', () => {
@@ -515,7 +515,7 @@ describe('SpanEvidenceKeyValueList', () => {
url: '/item',
},
})?.toString()
- ).toEqual('http://service.io/item');
+ ).toBe('http://service.io/item');
});
});
diff --git a/static/app/components/events/interfaces/performance/spanEvidenceKeyValueList.tsx b/static/app/components/events/interfaces/performance/spanEvidenceKeyValueList.tsx
index 59502e75f3b180..edde63cbac628b 100644
--- a/static/app/components/events/interfaces/performance/spanEvidenceKeyValueList.tsx
+++ b/static/app/components/events/interfaces/performance/spanEvidenceKeyValueList.tsx
@@ -74,7 +74,7 @@ function ConsecutiveDBQueriesSpanEvidence({
[
makeTransactionNameRow(event, organization, location, projectSlug),
causeSpans
- ? makeRow(t('Starting Span'), getSpanEvidenceValue(causeSpans[0]))
+ ? makeRow(t('Starting Span'), getSpanEvidenceValue(causeSpans[0]!))
: null,
makeRow('Parallelizable Spans', offendingSpans.map(getSpanEvidenceValue)),
makeRow(
@@ -124,11 +124,11 @@ function LargeHTTPPayloadSpanEvidence({
data={
[
makeTransactionNameRow(event, organization, location, projectSlug),
- makeRow(t('Large HTTP Payload Span'), getSpanEvidenceValue(offendingSpans[0])),
+ makeRow(t('Large HTTP Payload Span'), getSpanEvidenceValue(offendingSpans[0]!)),
makeRow(
t('Payload Size'),
- getSpanFieldBytes(offendingSpans[0], 'http.response_content_length') ??
- getSpanFieldBytes(offendingSpans[0], 'Encoded Body Size')
+ getSpanFieldBytes(offendingSpans[0]!, 'http.response_content_length') ??
+ getSpanFieldBytes(offendingSpans[0]!, 'Encoded Body Size')
),
].filter(Boolean) as KeyValueListData
}
@@ -182,7 +182,7 @@ function NPlusOneDBQueriesSpanEvidence({
makeTransactionNameRow(event, organization, location, projectSlug),
parentSpan ? makeRow(t('Parent Span'), getSpanEvidenceValue(parentSpan)) : null,
causeSpans.length > 0
- ? makeRow(t('Preceding Span'), getSpanEvidenceValue(causeSpans[0]))
+ ? makeRow(t('Preceding Span'), getSpanEvidenceValue(causeSpans[0]!))
: null,
...repeatingSpanRows,
].filter(Boolean) as KeyValueListData
@@ -202,7 +202,7 @@ function NPlusOneAPICallsSpanEvidence({
const baseURL = requestEntry?.data?.url;
const problemParameters = formatChangingQueryParameters(offendingSpans, baseURL);
- const commonPathPrefix = formatBasePath(offendingSpans[0], baseURL);
+ const commonPathPrefix = formatBasePath(offendingSpans[0]!, baseURL);
return (
@@ -394,12 +394,15 @@ function RenderBlockingAssetSpanEvidence({
);
@@ -416,15 +419,15 @@ function UncompressedAssetSpanEvidence({
@@ -444,7 +447,7 @@ function DefaultSpanEvidence({
[
makeTransactionNameRow(event, organization, location, projectSlug),
offendingSpans.length > 0
- ? makeRow(t('Offending Span'), getSpanEvidenceValue(offendingSpans[0]))
+ ? makeRow(t('Offending Span'), getSpanEvidenceValue(offendingSpans[0]!))
: null,
].filter(Boolean) as KeyValueListData
}
@@ -658,7 +661,7 @@ function formatChangingQueryParameters(spans: Span[], baseURL?: string): string[
const pairs: string[] = [];
for (const key in allQueryParameters) {
- const values = allQueryParameters[key];
+ const values = allQueryParameters[key]!;
// By definition, if the parameter only has one value that means it's not
// changing between calls, so omit it!
@@ -690,7 +693,7 @@ export const extractSpanURLString = (span: Span, baseURL?: string): URL | null =
}
}
- const [_method, _url] = (span?.description ?? '').split(' ', 2);
+ const [_method, _url] = (span?.description ?? '').split(' ', 2) as [string, string];
return safeURL(_url, baseURL) ?? null;
};
diff --git a/static/app/components/events/interfaces/request/index.spec.tsx b/static/app/components/events/interfaces/request/index.spec.tsx
index 2ee61131f256e3..51b623c53695c3 100644
--- a/static/app/components/events/interfaces/request/index.spec.tsx
+++ b/static/app/components/events/interfaces/request/index.spec.tsx
@@ -172,7 +172,7 @@ describe('Request entry', function () {
},
});
- render( , {
+ render( , {
organization: {
relayPiiConfig: JSON.stringify(DataScrubbingRelayPiiConfigFixture()),
},
@@ -186,7 +186,7 @@ describe('Request entry', function () {
expect(screen.getAllByText(/redacted/)).toHaveLength(7);
- await userEvent.hover(screen.getAllByText(/redacted/)[0]);
+ await userEvent.hover(screen.getAllByText(/redacted/)[0]!);
expect(
await screen.findByText(
@@ -221,7 +221,7 @@ describe('Request entry', function () {
],
});
- render( , {
+ render( , {
organization: {
relayPiiConfig: JSON.stringify(DataScrubbingRelayPiiConfigFixture()),
},
@@ -255,7 +255,7 @@ describe('Request entry', function () {
],
});
- render( , {
+ render( , {
organization: {
relayPiiConfig: JSON.stringify(DataScrubbingRelayPiiConfigFixture()),
},
@@ -289,7 +289,7 @@ describe('Request entry', function () {
],
});
- render( , {
+ render( , {
organization: {
relayPiiConfig: JSON.stringify(DataScrubbingRelayPiiConfigFixture()),
},
@@ -325,7 +325,7 @@ describe('Request entry', function () {
});
expect(() =>
- render( , {
+ render( , {
organization: {
relayPiiConfig: JSON.stringify(DataScrubbingRelayPiiConfigFixture()),
},
@@ -357,7 +357,7 @@ describe('Request entry', function () {
],
});
expect(() =>
- render( , {
+ render( , {
organization: {
relayPiiConfig: JSON.stringify(DataScrubbingRelayPiiConfigFixture()),
},
@@ -388,7 +388,7 @@ describe('Request entry', function () {
});
expect(() =>
- render( , {
+ render( , {
organization: {
relayPiiConfig: JSON.stringify(DataScrubbingRelayPiiConfigFixture()),
},
@@ -418,7 +418,7 @@ describe('Request entry', function () {
],
});
- render( );
+ render( );
expect(screen.getByText('query Test { test }')).toBeInTheDocument();
expect(screen.getByRole('row', {name: 'operationName Test'})).toBeInTheDocument();
@@ -456,11 +456,13 @@ describe('Request entry', function () {
});
const {container} = render(
-
+
);
+ // eslint-disable-next-line testing-library/no-container
expect(container.querySelector('.line-highlight')).toBeInTheDocument();
expect(
+ // eslint-disable-next-line testing-library/no-container
container.querySelector('.line-highlight')?.getAttribute('data-start')
).toBe('1');
expect(
diff --git a/static/app/components/events/interfaces/searchBarAction.spec.tsx b/static/app/components/events/interfaces/searchBarAction.spec.tsx
index 744f6721adb507..dbf5f58e269a99 100644
--- a/static/app/components/events/interfaces/searchBarAction.spec.tsx
+++ b/static/app/components/events/interfaces/searchBarAction.spec.tsx
@@ -63,7 +63,7 @@ const options: NonNullable<
];
describe('SearchBarAction', () => {
- let handleFilter;
+ let handleFilter!: jest.Mock;
beforeEach(() => {
handleFilter = jest.fn();
@@ -115,7 +115,7 @@ describe('SearchBarAction', () => {
});
it('With Option Type only', async () => {
- const typeOptions = options[0];
+ const typeOptions = options[0]!;
render(
{
});
it('With Option Level only', async () => {
- const levelOptions = options[1];
+ const levelOptions = options[1]!;
render(
{tct(
'The source file for this stack frame is missing a source map reference. A source map reference is usually represented by a [sourceMappingUrl] comment at the bottom of your source file.',
- {sourceMappingUrl: //# sourceMappingURL=... }
+ {sourceMappingUrl: {'//# sourceMappingURL=...'} }
)}
diff --git a/static/app/components/events/interfaces/spans/gapSpanDetails.tsx b/static/app/components/events/interfaces/spans/gapSpanDetails.tsx
index 0993e5dd67aec3..bb98a2e6706333 100644
--- a/static/app/components/events/interfaces/spans/gapSpanDetails.tsx
+++ b/static/app/components/events/interfaces/spans/gapSpanDetails.tsx
@@ -193,7 +193,7 @@ function ProfilePreviewHeader({canvasView, event, organization}: ProfilePreviewP
return (
- {t('Related Profile')}
+ {t('Profile')}
{
if (minimapInteractiveRef.current) {
const minimapWidth = minimapInteractiveRef.current.getBoundingClientRect().width;
if (minimapWidth !== this.state.minimapWidth) {
- // eslint-disable-next-line react/no-did-update-set-state
this.setState({
minimapWidth,
});
diff --git a/static/app/components/events/interfaces/spans/index.tsx b/static/app/components/events/interfaces/spans/index.tsx
index ae26ebce5350cf..f3ba8a2191d708 100644
--- a/static/app/components/events/interfaces/spans/index.tsx
+++ b/static/app/components/events/interfaces/spans/index.tsx
@@ -1,5 +1,4 @@
import {Fragment, useMemo} from 'react';
-// eslint-disable-next-line no-restricted-imports
import styled from '@emotion/styled';
import {Observer} from 'mobx-react';
diff --git a/static/app/components/events/interfaces/spans/measurementsPanel.tsx b/static/app/components/events/interfaces/spans/measurementsPanel.tsx
index b7ed38908bf219..565b3fb453fc79 100644
--- a/static/app/components/events/interfaces/spans/measurementsPanel.tsx
+++ b/static/app/components/events/interfaces/spans/measurementsPanel.tsx
@@ -33,7 +33,7 @@ function MeasurementsPanel(props: Props) {
}}
>
{Array.from(measurements.values()).map(verticalMark => {
- const mark = Object.values(verticalMark.marks)[0];
+ const mark = Object.values(verticalMark.marks)[0]!;
const {timestamp} = mark;
const bounds = getMeasurementBounds(timestamp, generateBounds);
@@ -45,7 +45,7 @@ function MeasurementsPanel(props: Props) {
const vitalLabels: VitalLabel[] = Object.keys(verticalMark.marks).map(name => ({
vital: VITAL_DETAILS[`measurements.${name}`],
- isPoorValue: verticalMark.marks[name].failedThreshold,
+ isPoorValue: verticalMark.marks[name]!.failedThreshold,
}));
if (vitalLabels.length > 1) {
@@ -62,7 +62,7 @@ function MeasurementsPanel(props: Props) {
);
})}
@@ -133,7 +133,6 @@ class LabelContainer extends Component {
componentDidMount() {
const {current} = this.elementDOMRef;
if (current) {
- // eslint-disable-next-line react/no-did-mount-set-state
this.setState({
width: current.clientWidth,
});
@@ -179,7 +178,6 @@ class MultiLabelContainer extends Component {
componentDidMount() {
const {current} = this.elementDOMRef;
if (current) {
- // eslint-disable-next-line react/no-did-mount-set-state
this.setState({
width: current.clientWidth,
});
diff --git a/static/app/components/events/interfaces/spans/newTraceDetailsSpanBar.tsx b/static/app/components/events/interfaces/spans/newTraceDetailsSpanBar.tsx
index a65f7e4bb83f18..97c38dccfdd1c3 100644
--- a/static/app/components/events/interfaces/spans/newTraceDetailsSpanBar.tsx
+++ b/static/app/components/events/interfaces/spans/newTraceDetailsSpanBar.tsx
@@ -23,7 +23,6 @@ import {
DividerLine,
DividerLineGhostContainer,
ErrorBadge,
- MetricsBadge,
ProfileBadge,
} from 'sentry/components/performance/waterfall/rowDivider';
import {
@@ -54,7 +53,6 @@ import {defined} from 'sentry/utils';
import {trackAnalytics} from 'sentry/utils/analytics';
import {browserHistory} from 'sentry/utils/browserHistory';
import {generateEventSlug} from 'sentry/utils/discover/urls';
-import {hasMetricsExperimentalFeature} from 'sentry/utils/metrics/features';
import toPercent from 'sentry/utils/number/toPercent';
import type {QuickTraceContextChildrenProps} from 'sentry/utils/performance/quickTrace/quickTraceContext';
import type {
@@ -331,7 +329,7 @@ export class NewTraceDetailsSpanBar extends Component<
return (
{Array.from(spanMeasurements.values()).map(verticalMark => {
- const mark = Object.values(verticalMark.marks)[0];
+ const mark = Object.values(verticalMark.marks)[0]!;
const {timestamp} = mark;
const bounds = getMeasurementBounds(timestamp, generateBounds);
@@ -575,7 +573,7 @@ export class NewTraceDetailsSpanBar extends Component<
connectObservers() {
const observer = new IntersectionObserver(([entry]) =>
- this.setState({isIntersecting: entry.isIntersecting}, () => {
+ this.setState({isIntersecting: entry!.isIntersecting}, () => {
// Scrolls the next(invisible) bar from the virtualized list,
// by its height. Allows us to look for anchored span bars occuring
// at the bottom of the span tree.
@@ -694,15 +692,6 @@ export class NewTraceDetailsSpanBar extends Component<
return errors?.length ? : null;
}
- renderMetricsBadge(span: NewTraceDetailsSpanBarProps['span']): React.ReactNode {
- const hasMetrics =
- '_metrics_summary' in span && Object.keys(span._metrics_summary ?? {}).length > 0;
-
- return hasMetrics && hasMetricsExperimentalFeature(this.props.organization) ? (
-
- ) : null;
- }
-
renderEmbeddedTransactionsBadge(
transactions: QuickTraceEvent[] | null
): React.ReactNode {
@@ -868,7 +857,6 @@ export class NewTraceDetailsSpanBar extends Component<
{this.renderDivider(dividerHandlerChildrenProps)}
- {this.renderMetricsBadge(this.props.span)}
{this.renderErrorBadge(errors)}
{this.renderEmbeddedTransactionsBadge(transactions)}
{this.renderMissingInstrumentationProfileBadge()}
diff --git a/static/app/components/events/interfaces/spans/newTraceDetailsSpanDetails.tsx b/static/app/components/events/interfaces/spans/newTraceDetailsSpanDetails.tsx
index 6886769c2e1013..3d870014281e6e 100644
--- a/static/app/components/events/interfaces/spans/newTraceDetailsSpanDetails.tsx
+++ b/static/app/components/events/interfaces/spans/newTraceDetailsSpanDetails.tsx
@@ -12,7 +12,6 @@ import SpanSummaryButton from 'sentry/components/events/interfaces/spans/spanSum
import FileSize from 'sentry/components/fileSize';
import ExternalLink from 'sentry/components/links/externalLink';
import LoadingIndicator from 'sentry/components/loadingIndicator';
-import {CustomMetricsEventData} from 'sentry/components/metrics/customMetricsEventData';
import Pill from 'sentry/components/pill';
import Pills from 'sentry/components/pills';
import {TransactionToProfileButton} from 'sentry/components/profiling/transactionToProfileButton';
@@ -275,7 +274,7 @@ function NewTraceDetailsSpanDetail(props: SpanDetailProps) {
projectID: event.projectID,
})}
>
- {hasNewSpansUIFlag ? t('View Span Summary') : t('View Similar Spans')}
+ {hasNewSpansUIFlag ? t('More Samples') : t('View Similar Spans')}
);
@@ -580,13 +579,6 @@ function NewTraceDetailsSpanDetail(props: SpanDetailProps) {
})}
- {span._metrics_summary ? (
-
- ) : null}
);
@@ -607,7 +599,7 @@ function NewTraceDetailsSpanDetail(props: SpanDetailProps) {
function SpanHTTPInfo({span}: {span: RawSpanType}) {
if (span.op === 'http.client' && span.description) {
- const [method, url] = span.description.split(' ');
+ const [method, url] = span.description.split(' ') as [string, string];
const parsedURL = safeURL(url);
const queryString = qs.parse(parsedURL?.search ?? '');
diff --git a/static/app/components/events/interfaces/spans/newTraceDetailsSpanTree.tsx b/static/app/components/events/interfaces/spans/newTraceDetailsSpanTree.tsx
index 0a626eb2ee50cf..3ae9bc742a86f6 100644
--- a/static/app/components/events/interfaces/spans/newTraceDetailsSpanTree.tsx
+++ b/static/app/components/events/interfaces/spans/newTraceDetailsSpanTree.tsx
@@ -309,7 +309,7 @@ class NewTraceDetailsSpanTree extends Component {
const showHiddenSpansMessage = !isCurrentSpanHidden && numOfSpansOutOfViewAbove > 0;
if (showHiddenSpansMessage) {
- firstHiddenSpanId = getSpanID(outOfViewSpansAbove[0].span);
+ firstHiddenSpanId = getSpanID(outOfViewSpansAbove[0]!.span);
messages.push(
{numOfSpansOutOfViewAbove} {t('spans out of view')}
@@ -322,7 +322,7 @@ class NewTraceDetailsSpanTree extends Component {
!isCurrentSpanFilteredOut && numOfFilteredSpansAbove > 0;
if (showFilteredSpansMessage) {
- firstHiddenSpanId = getSpanID(filteredSpansAbove[0].span);
+ firstHiddenSpanId = getSpanID(filteredSpansAbove[0]!.span);
if (!isCurrentSpanHidden) {
if (numOfFilteredSpansAbove === 1) {
messages.push(
@@ -499,7 +499,7 @@ class NewTraceDetailsSpanTree extends Component {
// and it's a last child.
const generationOffset =
parentContinuingDepths.length === 1 &&
- parentContinuingDepths[0].depth === 0 &&
+ parentContinuingDepths[0]!.depth === 0 &&
parentGeneration > 2
? 2
: 1;
@@ -864,7 +864,7 @@ function SpanRow(props: SpanRowProps) {
} = props;
const rowRef = useRef(null);
- const spanNode = spanTree[index];
+ const spanNode = spanTree[index]!;
useEffect(() => {
// Gap spans do not have IDs, so we can't really store them. This should not be a big deal, since
diff --git a/static/app/components/events/interfaces/spans/spanBar.tsx b/static/app/components/events/interfaces/spans/spanBar.tsx
index 7754f40a2a64f1..535c0c8834a60e 100644
--- a/static/app/components/events/interfaces/spans/spanBar.tsx
+++ b/static/app/components/events/interfaces/spans/spanBar.tsx
@@ -425,7 +425,7 @@ export class SpanBar extends Component {
return (
{Array.from(measurements.values()).map(verticalMark => {
- const mark = Object.values(verticalMark.marks)[0];
+ const mark = Object.values(verticalMark.marks)[0]!;
const {timestamp} = mark;
const bounds = getMeasurementBounds(timestamp, generateBounds);
diff --git a/static/app/components/events/interfaces/spans/spanDetail.spec.tsx b/static/app/components/events/interfaces/spans/spanDetail.spec.tsx
index be5a4df59b5b89..9bf9fa577e16ca 100644
--- a/static/app/components/events/interfaces/spans/spanDetail.spec.tsx
+++ b/static/app/components/events/interfaces/spans/spanDetail.spec.tsx
@@ -118,7 +118,7 @@ describe('SpanDetail', function () {
).toBeInTheDocument();
expect(
- screen.queryByRole('button', {name: 'View Query Summary'})
+ screen.queryByRole('button', {name: 'View Summary'})
).not.toBeInTheDocument();
});
@@ -145,10 +145,8 @@ describe('SpanDetail', function () {
expect(
screen.getByRole('button', {name: 'View Similar Spans'})
).toBeInTheDocument();
- expect(
- screen.getByRole('button', {name: 'View Query Summary'})
- ).toBeInTheDocument();
- expect(screen.getByRole('button', {name: 'View Query Summary'})).toHaveAttribute(
+ expect(screen.getByRole('button', {name: 'View Summary'})).toBeInTheDocument();
+ expect(screen.getByRole('button', {name: 'View Summary'})).toHaveAttribute(
'href',
'/organizations/org-slug/insights/backend/database/spans/span/a7ebd21614897/?project=2'
);
diff --git a/static/app/components/events/interfaces/spans/spanDetail.tsx b/static/app/components/events/interfaces/spans/spanDetail.tsx
index 7630e53cb5b904..f0a1b3d99912a8 100644
--- a/static/app/components/events/interfaces/spans/spanDetail.tsx
+++ b/static/app/components/events/interfaces/spans/spanDetail.tsx
@@ -12,7 +12,6 @@ import FileSize from 'sentry/components/fileSize';
import ExternalLink from 'sentry/components/links/externalLink';
import Link from 'sentry/components/links/link';
import LoadingIndicator from 'sentry/components/loadingIndicator';
-import {CustomMetricsEventData} from 'sentry/components/metrics/customMetricsEventData';
import {
ErrorDot,
ErrorLevel,
@@ -197,7 +196,7 @@ function SpanDetail(props: Props) {
return null;
}
- const childTransaction = childTransactions[0];
+ const childTransaction = childTransactions[0]!;
const transactionResult: TransactionResult = {
'project.name': childTransaction.project_slug,
@@ -578,13 +577,6 @@ function SpanDetail(props: Props) {
))}
- {span._metrics_summary && (
-
- )}
);
diff --git a/static/app/components/events/interfaces/spans/spanProfileDetails.tsx b/static/app/components/events/interfaces/spans/spanProfileDetails.tsx
index 22f63b9582b4ca..197d5df1220176 100644
--- a/static/app/components/events/interfaces/spans/spanProfileDetails.tsx
+++ b/static/app/components/events/interfaces/spans/spanProfileDetails.tsx
@@ -107,7 +107,7 @@ export function useSpanProfileDetails(event, span) {
// find the number of nodes with the minimum number of samples
let hasMinCount = 0;
for (let i = 0; i < nodes.length; i++) {
- if (nodes[i].count >= TOP_NODE_MIN_COUNT) {
+ if (nodes[i]!.count >= TOP_NODE_MIN_COUNT) {
hasMinCount += 1;
} else {
break;
@@ -125,7 +125,7 @@ export function useSpanProfileDetails(event, span) {
}
return {
- frames: extractFrames(nodes[index], event.platform || 'other'),
+ frames: extractFrames(nodes[index]!, event.platform || 'other'),
hasPrevious: index > 0,
hasNext: index + 1 < maxNodes,
};
@@ -195,7 +195,7 @@ export function SpanProfileDetails({
return null;
}
- const percentage = formatPercentage(nodes[index].count / totalWeight);
+ const percentage = formatPercentage(nodes[index]!.count / totalWeight);
return (
@@ -217,7 +217,7 @@ export function SpanProfileDetails({
size="xs"
title={t(
'%s out of %s (%s) of the call stacks collected during this span',
- nodes[index].count,
+ nodes[index]!.count,
totalWeight,
percentage
)}
@@ -274,11 +274,11 @@ function getTopNodes(profile: Profile, startTimestamp, stopTimestamp): CallTreeN
const callTree: CallTreeNode = new CallTreeNode(ProfilingFrame.Root, null);
for (let i = 0; i < profile.samples.length; i++) {
- const sample = profile.samples[i];
+ const sample = profile.samples[i]!;
// TODO: should this take self times into consideration?
const inRange = startTimestamp <= duration && duration < stopTimestamp;
- duration += profile.weights[i];
+ duration += profile.weights[i]!;
if (sample.isRoot || !inRange) {
continue;
diff --git a/static/app/components/events/interfaces/spans/spanSiblingGroupBar.tsx b/static/app/components/events/interfaces/spans/spanSiblingGroupBar.tsx
index c65f4b88d04776..75fb10f64bf148 100644
--- a/static/app/components/events/interfaces/spans/spanSiblingGroupBar.tsx
+++ b/static/app/components/events/interfaces/spans/spanSiblingGroupBar.tsx
@@ -72,8 +72,8 @@ export default function SpanSiblingGroupBar(props: SpanSiblingGroupBarProps) {
return '';
}
- const operation = spanGrouping[0].span.op;
- const description = spanGrouping[0].span.description;
+ const operation = spanGrouping[0]!.span.op;
+ const description = spanGrouping[0]!.span.description;
if (!description || !operation) {
if (description) {
@@ -132,7 +132,7 @@ export default function SpanSiblingGroupBar(props: SpanSiblingGroupBarProps) {
))}
@@ -155,7 +155,7 @@ export default function SpanSiblingGroupBar(props: SpanSiblingGroupBarProps) {
spanNumber={spanNumber}
generateBounds={generateBounds}
toggleSpanGroup={() => {
- toggleSiblingSpanGroup?.(spanGrouping[0].span, occurrence);
+ toggleSiblingSpanGroup?.(spanGrouping[0]!.span, occurrence);
isEmbeddedSpanTree &&
trackAnalytics('issue_details.performance.autogrouped_siblings_toggle', {
organization,
diff --git a/static/app/components/events/interfaces/spans/spanSummaryButton.tsx b/static/app/components/events/interfaces/spans/spanSummaryButton.tsx
index 12750e292fc7bf..5aab475016cb91 100644
--- a/static/app/components/events/interfaces/spans/spanSummaryButton.tsx
+++ b/static/app/components/events/interfaces/spans/spanSummaryButton.tsx
@@ -54,7 +54,7 @@ function SpanSummaryButton(props: Props) {
});
}}
>
- {t('View Query Summary')}
+ {t('View Summary')}
);
}
diff --git a/static/app/components/events/interfaces/spans/spanTree.tsx b/static/app/components/events/interfaces/spans/spanTree.tsx
index 41f653483eae9b..03960067aeba25 100644
--- a/static/app/components/events/interfaces/spans/spanTree.tsx
+++ b/static/app/components/events/interfaces/spans/spanTree.tsx
@@ -305,7 +305,7 @@ class SpanTree extends Component {
const showHiddenSpansMessage = !isCurrentSpanHidden && numOfSpansOutOfViewAbove > 0;
if (showHiddenSpansMessage) {
- firstHiddenSpanId = getSpanID(outOfViewSpansAbove[0].span);
+ firstHiddenSpanId = getSpanID(outOfViewSpansAbove[0]!.span);
messages.push(
{numOfSpansOutOfViewAbove} {t('spans out of view')}
@@ -318,7 +318,7 @@ class SpanTree extends Component {
!isCurrentSpanFilteredOut && numOfFilteredSpansAbove > 0;
if (showFilteredSpansMessage) {
- firstHiddenSpanId = getSpanID(filteredSpansAbove[0].span);
+ firstHiddenSpanId = getSpanID(filteredSpansAbove[0]!.span);
if (!isCurrentSpanHidden) {
if (numOfFilteredSpansAbove === 1) {
messages.push(
@@ -816,7 +816,7 @@ function SpanRow(props: SpanRowProps) {
} = props;
const rowRef = useRef(null);
- const spanNode = spanTree[index];
+ const spanNode = spanTree[index]!;
useEffect(() => {
// Gap spans do not have IDs, so we can't really store them. This should not be a big deal, since
diff --git a/static/app/components/events/interfaces/spans/spanTreeModel.spec.tsx b/static/app/components/events/interfaces/spans/spanTreeModel.spec.tsx
index e536a47efb7b76..cf7688e9870f33 100644
--- a/static/app/components/events/interfaces/spans/spanTreeModel.spec.tsx
+++ b/static/app/components/events/interfaces/spans/spanTreeModel.spec.tsx
@@ -455,11 +455,11 @@ describe('SpanTreeModel', () => {
);
fullWaterfallExpected[0] = {
- ...fullWaterfallExpected[0],
+ ...fullWaterfallExpected[0]!,
};
- assert(fullWaterfallExpected[0].type === 'span');
- fullWaterfallExpected[0].numOfSpanChildren += 1;
- fullWaterfallExpected[0].showEmbeddedChildren = true;
+ assert(fullWaterfallExpected[0]!.type === 'span');
+ fullWaterfallExpected[0]!.numOfSpanChildren += 1;
+ fullWaterfallExpected[0]!.showEmbeddedChildren = true;
expect(spans).toEqual(fullWaterfallExpected);
@@ -559,11 +559,11 @@ describe('SpanTreeModel', () => {
},
};
- if (!Array.isArray(event2.entries[0].data)) {
+ if (!Array.isArray(event2.entries[0]!.data)) {
throw new Error('event2.entries[0].data is not an array');
}
- const data = event2.entries[0].data as RawSpanType[];
+ const data = event2.entries[0]!.data as RawSpanType[];
for (let i = 0; i < 5; i++) {
data.push(spanTemplate);
}
@@ -602,12 +602,12 @@ describe('SpanTreeModel', () => {
directParent: null,
});
- expect(spans.length).toEqual(2);
- expect(spans[1].type).toEqual('span_group_siblings');
+ expect(spans).toHaveLength(2);
+ expect(spans[1]!.type).toBe('span_group_siblings');
// If statement here is required to avoid TS linting issues
- if (spans[1].type === 'span_group_siblings') {
- expect(spans[1].spanSiblingGrouping!.length).toEqual(5);
+ if (spans[1]!.type === 'span_group_siblings') {
+ expect(spans[1]!.spanSiblingGrouping!).toHaveLength(5);
}
});
@@ -641,11 +641,11 @@ describe('SpanTreeModel', () => {
},
};
- if (!Array.isArray(event2.entries[0].data)) {
+ if (!Array.isArray(event2.entries[0]!.data)) {
throw new Error('event2.entries[0].data is not an array');
}
- const data = event2.entries[0].data as RawSpanType[];
+ const data = event2.entries[0]!.data as RawSpanType[];
for (let i = 0; i < 4; i++) {
data.push(spanTemplate);
}
@@ -684,8 +684,8 @@ describe('SpanTreeModel', () => {
directParent: null,
});
- expect(spans.length).toEqual(5);
- spans.forEach(span => expect(span.type).toEqual('span'));
+ expect(spans).toHaveLength(5);
+ spans.forEach(span => expect(span.type).toBe('span'));
});
it('properly autogroups similar siblings and leaves other siblings ungrouped', () => {
@@ -737,11 +737,11 @@ describe('SpanTreeModel', () => {
},
};
- if (!Array.isArray(event2.entries[0].data)) {
+ if (!Array.isArray(event2.entries[0]!.data)) {
throw new Error('event2.entries[0].data is not an array');
}
- const data = event2.entries[0].data as RawSpanType[];
+ const data = event2.entries[0]!.data as RawSpanType[];
for (let i = 0; i < 7; i++) {
data.push(groupableSpanTemplate);
}
@@ -787,9 +787,9 @@ describe('SpanTreeModel', () => {
directParent: null,
});
- expect(spans.length).toEqual(4);
- expect(spans[1].type).toEqual('span_group_siblings');
- expect(spans[2].type).toEqual('span');
- expect(spans[3].type).toEqual('span_group_siblings');
+ expect(spans).toHaveLength(4);
+ expect(spans[1]!.type).toBe('span_group_siblings');
+ expect(spans[2]!.type).toBe('span');
+ expect(spans[3]!.type).toBe('span_group_siblings');
});
});
diff --git a/static/app/components/events/interfaces/spans/spanTreeModel.tsx b/static/app/components/events/interfaces/spans/spanTreeModel.tsx
index 7de0a6c3ceaf99..41fd73af6d5898 100644
--- a/static/app/components/events/interfaces/spans/spanTreeModel.tsx
+++ b/static/app/components/events/interfaces/spans/spanTreeModel.tsx
@@ -262,11 +262,11 @@ class SpanTreeModel {
// we will need to reconstruct the tree depth information. This is only neccessary
// when the span group chain is hidden/collapsed.
if (spanNestedGrouping.length === 1) {
- const treeDepthEntry = isOrphanSpan(spanNestedGrouping[0].span)
- ? ({type: 'orphan', depth: spanNestedGrouping[0].treeDepth} as OrphanTreeDepth)
- : spanNestedGrouping[0].treeDepth;
+ const treeDepthEntry = isOrphanSpan(spanNestedGrouping[0]!.span)
+ ? ({type: 'orphan', depth: spanNestedGrouping[0]!.treeDepth} as OrphanTreeDepth)
+ : spanNestedGrouping[0]!.treeDepth;
- if (!spanNestedGrouping[0].isLastSibling) {
+ if (!spanNestedGrouping[0]!.isLastSibling) {
continuingTreeDepths = [...continuingTreeDepths, treeDepthEntry];
}
}
@@ -352,11 +352,11 @@ class SpanTreeModel {
};
if (descendantsSource?.length >= MIN_SIBLING_GROUP_SIZE) {
- let prevSpanModel = descendantsSource[0];
+ let prevSpanModel = descendantsSource[0]!;
let currentGroup = [prevSpanModel];
for (let i = 1; i < descendantsSource.length; i++) {
- const currSpanModel = descendantsSource[i];
+ const currSpanModel = descendantsSource[i]!;
// We want to group siblings only if they share the same op and description, and if they have no children
if (
@@ -438,7 +438,7 @@ class SpanTreeModel {
return acc;
}
- const key = getSiblingGroupKey(group[0].span, occurrence);
+ const key = getSiblingGroupKey(group[0]!.span, occurrence);
if (this.expandedSiblingGroups.has(key)) {
// This check is needed here, since it is possible that a user could be filtering for a specific span ID.
// In this case, we must add only the specified span into the accumulator's descendants
@@ -482,7 +482,7 @@ class SpanTreeModel {
});
const gapSpan = this.generateSpanGap(
- group[0].span,
+ group[0]!.span,
event,
acc.previousSiblingEndTimestamp,
treeDepth + 1,
@@ -512,7 +512,7 @@ class SpanTreeModel {
// if the spans are filtered or out of bounds here
if (
- this.isSpanFilteredOut(props, group[0]) ||
+ this.isSpanFilteredOut(props, group[0]!) ||
groupShouldBeHidden(group, focusedSpanIds)
) {
group.forEach(spanModel => {
@@ -525,8 +525,8 @@ class SpanTreeModel {
}
const bounds = generateBounds({
- startTimestamp: group[0].span.start_timestamp,
- endTimestamp: group[group.length - 1].span.timestamp,
+ startTimestamp: group[0]!.span.start_timestamp,
+ endTimestamp: group[group.length - 1]!.span.timestamp,
});
if (!bounds.isSpanVisibleInView) {
@@ -540,7 +540,7 @@ class SpanTreeModel {
}
const gapSpan = this.generateSpanGap(
- group[0].span,
+ group[0]!.span,
event,
acc.previousSiblingEndTimestamp,
treeDepth + 1,
@@ -590,7 +590,7 @@ class SpanTreeModel {
};
acc.previousSiblingEndTimestamp =
- wrappedSiblings[wrappedSiblings.length - 1].span.timestamp;
+ wrappedSiblings[wrappedSiblings.length - 1]!.span.timestamp;
acc.descendants.push(groupedSiblingsSpan);
return acc;
@@ -678,14 +678,14 @@ class SpanTreeModel {
spanNestedGrouping.length === 1
) {
if (!isNestedSpanGroupExpanded) {
- const parentSpan = spanNestedGrouping[0].span;
+ const parentSpan = spanNestedGrouping[0]!.span;
const parentSpanBounds = generateBounds({
startTimestamp: parentSpan.start_timestamp,
endTimestamp: parentSpan.timestamp,
});
const isParentSpanOutOfView = !parentSpanBounds.isSpanVisibleInView;
if (!isParentSpanOutOfView) {
- return [spanNestedGrouping[0], wrappedSpan, ...descendants];
+ return [spanNestedGrouping[0]!, wrappedSpan, ...descendants];
}
}
diff --git a/static/app/components/events/interfaces/spans/traceView.spec.tsx b/static/app/components/events/interfaces/spans/traceView.spec.tsx
index 8a916e488f39b3..5285505fa72cce 100644
--- a/static/app/components/events/interfaces/spans/traceView.spec.tsx
+++ b/static/app/components/events/interfaces/spans/traceView.spec.tsx
@@ -20,14 +20,14 @@ import ProjectsStore from 'sentry/stores/projectsStore';
import {QuickTraceContext} from 'sentry/utils/performance/quickTrace/quickTraceContext';
import QuickTraceQuery from 'sentry/utils/performance/quickTrace/quickTraceQuery';
-function initializeData(settings) {
+function initializeData(settings: Parameters[0]) {
const data = _initializeData(settings);
ProjectsStore.loadInitialData(data.projects);
return data;
}
describe('TraceView', () => {
- let data;
+ let data!: ReturnType;
beforeEach(() => {
data = initializeData({});
@@ -202,7 +202,7 @@ describe('TraceView', () => {
expect(screen.queryAllByText('group me')).toHaveLength(2);
- const firstGroup = screen.queryAllByText('Autogrouped — http —')[0];
+ const firstGroup = screen.queryAllByText('Autogrouped — http —')[0]!;
await userEvent.click(firstGroup);
expect(await screen.findAllByText('group me')).toHaveLength(6);
@@ -210,7 +210,7 @@ describe('TraceView', () => {
await userEvent.click(secondGroup);
expect(await screen.findAllByText('group me')).toHaveLength(10);
- const firstRegroup = screen.queryAllByText('Regroup')[0];
+ const firstRegroup = screen.queryAllByText('Regroup')[0]!;
await userEvent.click(firstRegroup);
expect(await screen.findAllByText('group me')).toHaveLength(6);
diff --git a/static/app/components/events/interfaces/spans/traceView.tsx b/static/app/components/events/interfaces/spans/traceView.tsx
index 317f76f18ba9d2..f294cfa84b540e 100644
--- a/static/app/components/events/interfaces/spans/traceView.tsx
+++ b/static/app/components/events/interfaces/spans/traceView.tsx
@@ -1,4 +1,4 @@
-import {createRef, memo, useEffect, useState} from 'react';
+import {memo, useEffect, useRef, useState} from 'react';
import {Observer} from 'mobx-react';
import EmptyStateWarning from 'sentry/components/emptyStateWarning';
@@ -26,10 +26,10 @@ type Props = {
};
function TraceView(props: Props) {
- const traceViewRef = createRef();
- const traceViewHeaderRef = createRef();
- const virtualScrollBarContainerRef = createRef();
- const minimapInteractiveRef = createRef();
+ const traceViewRef = useRef(null);
+ const traceViewHeaderRef = useRef(null);
+ const virtualScrollBarContainerRef = useRef(null);
+ const minimapInteractiveRef = useRef(null);
const [isMounted, setIsMounted] = useState(false);
diff --git a/static/app/components/events/interfaces/spans/types.tsx b/static/app/components/events/interfaces/spans/types.tsx
index ccd8afd617f095..8581dfc8dc2927 100644
--- a/static/app/components/events/interfaces/spans/types.tsx
+++ b/static/app/components/events/interfaces/spans/types.tsx
@@ -48,7 +48,6 @@ export type RawSpanType = {
// this is essentially end_timestamp
timestamp: number;
trace_id: string;
- _metrics_summary?: MetricsSummary;
data?: SpanSourceCodeAttributes & SpanDatabaseAttributes & Record;
description?: string;
exclusive_time?: number;
@@ -102,7 +101,6 @@ export const rawSpanKeys: Set = new Set([
'tags',
'hash',
'exclusive_time',
- '_metrics_summary',
]);
export type OrphanSpanType = RawSpanType & {
diff --git a/static/app/components/events/interfaces/spans/utils.spec.tsx b/static/app/components/events/interfaces/spans/utils.spec.tsx
index 9ea31d0518903b..9a81bbbba3c10d 100644
--- a/static/app/components/events/interfaces/spans/utils.spec.tsx
+++ b/static/app/components/events/interfaces/spans/utils.spec.tsx
@@ -12,29 +12,29 @@ describe('test utility functions', function () {
1658925888.60193
);
- expect(result.start).toEqual('1658925888.601534');
- expect(result.end).toEqual('1658925888.601930');
+ expect(result.start).toBe('1658925888.601534');
+ expect(result.end).toBe('1658925888.601930');
result = getFormattedTimeRangeWithLeadingAndTrailingZero(
1658925888.601534,
165892588.060193
);
- expect(result.start).toEqual('1658925888.601534');
- expect(result.end).toEqual('0165892588.060193');
+ expect(result.start).toBe('1658925888.601534');
+ expect(result.end).toBe('0165892588.060193');
result = getFormattedTimeRangeWithLeadingAndTrailingZero(
16589258.6015,
1658925888.060193
);
- expect(result.start).toEqual('0016589258.601500');
- expect(result.end).toEqual('1658925888.060193');
+ expect(result.start).toBe('0016589258.601500');
+ expect(result.end).toBe('1658925888.060193');
result = getFormattedTimeRangeWithLeadingAndTrailingZero(
1658925888.601534,
1658925888.601935
);
- expect(result.start).toEqual('1658925888.601534');
- expect(result.end).toEqual('1658925888.601935');
+ expect(result.start).toBe('1658925888.601534');
+ expect(result.end).toBe('1658925888.601935');
});
});
diff --git a/static/app/components/events/interfaces/spans/utils.tsx b/static/app/components/events/interfaces/spans/utils.tsx
index b3d25167f9212d..e5cade8519816b 100644
--- a/static/app/components/events/interfaces/spans/utils.tsx
+++ b/static/app/components/events/interfaces/spans/utils.tsx
@@ -700,7 +700,7 @@ function hasFailedThreshold(marks: Measurements): boolean {
);
return records.some(record => {
- const {value} = marks[record.slug];
+ const {value} = marks[record.slug]!;
if (typeof value === 'number' && typeof record.poorThreshold === 'number') {
return value >= record.poorThreshold;
}
@@ -733,7 +733,7 @@ export function getMeasurements(
const measurements = Object.keys(event.measurements)
.filter(name => allowedVitals.has(`measurements.${name}`))
.map(name => {
- const associatedMeasurement = event.measurements![name];
+ const associatedMeasurement = event.measurements![name]!;
return {
name,
// Time timestamp is in seconds, but the measurement value is given in ms so convert it here
@@ -947,8 +947,8 @@ export function getSpanGroupTimestamps(spanGroup: EnhancedSpan[]) {
};
},
{
- startTimestamp: spanGroup[0].span.start_timestamp,
- endTimestamp: spanGroup[0].span.timestamp,
+ startTimestamp: spanGroup[0]!.span.start_timestamp,
+ endTimestamp: spanGroup[0]!.span.timestamp,
}
);
}
@@ -1065,22 +1065,22 @@ export function getFormattedTimeRangeWithLeadingAndTrailingZero(
start: string[];
}>(
(acc, startString, index) => {
- if (startString.length > endStrings[index].length) {
+ if (startString.length > endStrings[index]!.length) {
acc.start.push(startString);
acc.end.push(
index === 0
- ? endStrings[index].padStart(startString.length, '0')
- : endStrings[index].padEnd(startString.length, '0')
+ ? endStrings[index]!.padStart(startString.length, '0')
+ : endStrings[index]!.padEnd(startString.length, '0')
);
return acc;
}
acc.start.push(
index === 0
- ? startString.padStart(endStrings[index].length, '0')
- : startString.padEnd(endStrings[index].length, '0')
+ ? startString.padStart(endStrings[index]!.length, '0')
+ : startString.padEnd(endStrings[index]!.length, '0')
);
- acc.end.push(endStrings[index]);
+ acc.end.push(endStrings[index]!);
return acc;
},
{start: [], end: []}
diff --git a/static/app/components/events/interfaces/spans/waterfallModel.spec.tsx b/static/app/components/events/interfaces/spans/waterfallModel.spec.tsx
index 6b2f210ab39afd..4bdef959fb8453 100644
--- a/static/app/components/events/interfaces/spans/waterfallModel.spec.tsx
+++ b/static/app/components/events/interfaces/spans/waterfallModel.spec.tsx
@@ -626,16 +626,16 @@ describe('WaterfallModel', () => {
it('querySpanSearch', async () => {
const waterfallModel = new WaterfallModel(event);
- expect(waterfallModel.fuse).toBe(undefined);
+ expect(waterfallModel.fuse).toBeUndefined();
// Fuzzy search needs to be loaded asynchronously
await tick();
// expect fuse index to be created
- expect(waterfallModel.fuse).not.toBe(undefined);
+ expect(waterfallModel.fuse).toBeDefined();
- expect(waterfallModel.filterSpans).toBe(undefined);
- expect(waterfallModel.searchQuery).toBe(undefined);
+ expect(waterfallModel.filterSpans).toBeUndefined();
+ expect(waterfallModel.searchQuery).toBeUndefined();
waterfallModel.querySpanSearch('GET /api/0/organizations/?member=1');
@@ -668,12 +668,12 @@ describe('WaterfallModel', () => {
expected[1] = {
type: 'out_of_view',
- span: fullWaterfall[1].span,
+ span: fullWaterfall[1]!.span,
} as EnhancedProcessedSpanType;
expected[4] = {
type: 'out_of_view',
- span: fullWaterfall[4].span,
+ span: fullWaterfall[4]!.span,
} as EnhancedProcessedSpanType;
expect(spans).toEqual(expected);
@@ -686,51 +686,51 @@ describe('WaterfallModel', () => {
});
assert(
- fullWaterfall[10].type === 'span_group_chain' &&
- fullWaterfall[10].spanNestedGrouping
+ fullWaterfall[10]!.type === 'span_group_chain' &&
+ fullWaterfall[10]!.spanNestedGrouping
);
expected = [
{
type: 'filtered_out',
- span: fullWaterfall[0].span,
+ span: fullWaterfall[0]!.span,
},
{
type: 'out_of_view',
- span: fullWaterfall[1].span,
+ span: fullWaterfall[1]!.span,
},
fullWaterfall[2],
fullWaterfall[3],
{
type: 'filtered_out',
- span: fullWaterfall[4].span,
+ span: fullWaterfall[4]!.span,
},
{
type: 'filtered_out',
- span: fullWaterfall[5].span,
+ span: fullWaterfall[5]!.span,
},
{
type: 'filtered_out',
- span: fullWaterfall[6].span,
+ span: fullWaterfall[6]!.span,
},
{
type: 'filtered_out',
- span: fullWaterfall[7].span,
+ span: fullWaterfall[7]!.span,
},
{
type: 'filtered_out',
- span: fullWaterfall[9].span,
+ span: fullWaterfall[9]!.span,
},
{
type: 'filtered_out',
- span: fullWaterfall[10].spanNestedGrouping[0].span,
+ span: fullWaterfall[10]!.spanNestedGrouping![0]!.span,
},
{
type: 'filtered_out',
- span: fullWaterfall[10].spanNestedGrouping[1].span,
+ span: fullWaterfall[10]!.spanNestedGrouping![1]!.span,
},
{
type: 'filtered_out',
- span: fullWaterfall[11].span,
+ span: fullWaterfall[11]!.span,
},
] as EnhancedProcessedSpanType[];
@@ -746,7 +746,7 @@ describe('WaterfallModel', () => {
viewEnd: 0.65,
});
- expected[1].type = 'filtered_out';
+ expected[1]!.type = 'filtered_out';
expect(spans).toEqual(expected);
});
@@ -814,7 +814,7 @@ describe('WaterfallModel', () => {
...event,
entries: [
{
- data: [event.entries[0].data[0]],
+ data: [event.entries[0]!.data[0]],
type: EntryType.SPANS,
},
],
@@ -834,7 +834,7 @@ describe('WaterfallModel', () => {
toggleNestedSpanGroup: undefined,
},
{
- ...fullWaterfall[1],
+ ...fullWaterfall[1]!,
isLastSibling: true,
numOfSpanChildren: 0,
toggleNestedSpanGroup: undefined,
@@ -848,10 +848,10 @@ describe('WaterfallModel', () => {
entries: [
{
data: [
- event.entries[0].data[0],
+ event.entries[0]!.data[0],
{
- ...event.entries[0].data[0],
- parent_span_id: event.entries[0].data[0].span_id,
+ ...event.entries[0]!.data[0],
+ parent_span_id: event.entries[0]!.data[0].span_id,
span_id: 'foo',
},
],
@@ -875,17 +875,17 @@ describe('WaterfallModel', () => {
toggleNestedSpanGroup: undefined,
},
{
- ...fullWaterfall[1],
+ ...fullWaterfall[1]!,
treeDepth: 1,
isLastSibling: true,
numOfSpanChildren: 1,
toggleNestedSpanGroup: undefined,
},
{
- ...fullWaterfall[1],
+ ...fullWaterfall[1]!,
span: {
- ...fullWaterfall[1].span,
- parent_span_id: event.entries[0].data[0].span_id,
+ ...fullWaterfall[1]!.span,
+ parent_span_id: event.entries[0]!.data[0]!.span_id,
span_id: 'foo',
},
treeDepth: 2,
@@ -902,14 +902,14 @@ describe('WaterfallModel', () => {
entries: [
{
data: [
- event.entries[0].data[0],
+ event.entries[0]!.data[0],
{
- ...event.entries[0].data[0],
- parent_span_id: event.entries[0].data[0].span_id,
+ ...event.entries[0]!.data[0],
+ parent_span_id: event.entries[0]!.data[0]!.span_id,
span_id: 'foo',
},
{
- ...event.entries[0].data[0],
+ ...event.entries[0]!.data[0],
parent_span_id: 'foo',
span_id: 'bar',
},
@@ -928,7 +928,7 @@ describe('WaterfallModel', () => {
// expect 1 or more spans are grouped
expect(spans).toHaveLength(3);
- assert(fullWaterfall[1].type === 'span');
+ assert(fullWaterfall[1]!.type === 'span');
const collapsedWaterfallExpected = [
{
...fullWaterfall[0],
@@ -938,24 +938,24 @@ describe('WaterfallModel', () => {
{
type: 'span_group_chain',
treeDepth: 1,
- continuingTreeDepths: fullWaterfall[1].continuingTreeDepths,
+ continuingTreeDepths: fullWaterfall[1]!.continuingTreeDepths,
span: {
- ...fullWaterfall[1].span,
+ ...fullWaterfall[1]!.span,
parent_span_id: 'foo',
span_id: 'bar',
},
spanNestedGrouping: [
{
- ...fullWaterfall[1],
+ ...fullWaterfall[1]!,
isLastSibling: true,
numOfSpanChildren: 1,
toggleNestedSpanGroup: undefined,
},
{
- ...fullWaterfall[1],
+ ...fullWaterfall[1]!,
span: {
- ...fullWaterfall[1].span,
- parent_span_id: event.entries[0].data[0].span_id,
+ ...fullWaterfall[1]!.span,
+ parent_span_id: event.entries[0]!.data[0].span_id,
span_id: 'foo',
},
isLastSibling: true,
@@ -967,9 +967,9 @@ describe('WaterfallModel', () => {
toggleNestedSpanGroup: expect.anything(),
},
{
- ...fullWaterfall[1],
+ ...fullWaterfall[1]!,
span: {
- ...fullWaterfall[1].span,
+ ...fullWaterfall[1]!.span,
parent_span_id: 'foo',
span_id: 'bar',
},
@@ -983,8 +983,8 @@ describe('WaterfallModel', () => {
expect(spans).toEqual(collapsedWaterfallExpected);
// Expand span group
- assert(spans[1].type === 'span' && spans[1].toggleNestedSpanGroup);
- spans[1].toggleNestedSpanGroup();
+ assert(spans[1]!.type === 'span' && spans[1]!.toggleNestedSpanGroup);
+ spans[1]!.toggleNestedSpanGroup();
spans = waterfallModel.getWaterfall({
viewStart: 0,
@@ -1002,17 +1002,17 @@ describe('WaterfallModel', () => {
toggleNestedSpanGroup: undefined,
},
{
- ...fullWaterfall[1],
+ ...fullWaterfall[1]!,
isLastSibling: true,
numOfSpanChildren: 1,
treeDepth: 1,
toggleNestedSpanGroup: expect.anything(),
},
{
- ...fullWaterfall[1],
+ ...fullWaterfall[1]!,
span: {
- ...fullWaterfall[1].span,
- parent_span_id: event.entries[0].data[0].span_id,
+ ...fullWaterfall[1]!.span,
+ parent_span_id: event.entries[0]!.data[0].span_id,
span_id: 'foo',
},
isLastSibling: true,
@@ -1021,9 +1021,9 @@ describe('WaterfallModel', () => {
toggleNestedSpanGroup: undefined,
},
{
- ...fullWaterfall[1],
+ ...fullWaterfall[1]!,
span: {
- ...fullWaterfall[1].span,
+ ...fullWaterfall[1]!.span,
parent_span_id: 'foo',
span_id: 'bar',
},
@@ -1035,8 +1035,8 @@ describe('WaterfallModel', () => {
]);
// Collapse span group
- assert(spans[1].type === 'span' && spans[1].toggleNestedSpanGroup);
- spans[1].toggleNestedSpanGroup();
+ assert(spans[1]!.type === 'span' && spans[1]!.toggleNestedSpanGroup);
+ spans[1]!.toggleNestedSpanGroup();
spans = waterfallModel.getWaterfall({
viewStart: 0,
diff --git a/static/app/components/events/interfaces/spans/waterfallModel.tsx b/static/app/components/events/interfaces/spans/waterfallModel.tsx
index a77d4c2d0d719c..2b0b446cbbb03a 100644
--- a/static/app/components/events/interfaces/spans/waterfallModel.tsx
+++ b/static/app/components/events/interfaces/spans/waterfallModel.tsx
@@ -283,8 +283,8 @@ class WaterfallModel {
};
},
{
- traceStartTimestamp: this.traceBounds[0].traceStartTimestamp,
- traceEndTimestamp: this.traceBounds[0].traceEndTimestamp,
+ traceStartTimestamp: this.traceBounds[0]!.traceStartTimestamp,
+ traceEndTimestamp: this.traceBounds[0]!.traceEndTimestamp,
}
);
};
diff --git a/static/app/components/events/interfaces/threads.spec.tsx b/static/app/components/events/interfaces/threads.spec.tsx
index 3ff3b1fce7e1a0..f96be912bf81cd 100644
--- a/static/app/components/events/interfaces/threads.spec.tsx
+++ b/static/app/components/events/interfaces/threads.spec.tsx
@@ -218,7 +218,7 @@ describe('Threads', function () {
};
const props: React.ComponentProps = {
- data: event.entries[1].data as React.ComponentProps['data'],
+ data: event.entries[1]!.data as React.ComponentProps['data'],
event,
groupingCurrentLevel: 0,
projectSlug: project.slug,
@@ -268,7 +268,7 @@ describe('Threads', function () {
render( , {organization});
expect(
- within(screen.getAllByTestId('line')[0]).getByText(
+ within(screen.getAllByTestId('line')[0]!).getByText(
'sentry/controllers/welcome_controller.rb'
)
).toBeInTheDocument();
@@ -287,7 +287,7 @@ describe('Threads', function () {
// Last frame is the first on the list
expect(
- within(screen.getAllByTestId('line')[0]).getByText(
+ within(screen.getAllByTestId('line')[0]!).getByText(
'puma (3.12.6) lib/puma/server.rb'
)
).toBeInTheDocument();
@@ -298,7 +298,7 @@ describe('Threads', function () {
// First frame is the first on the list
expect(
- within(screen.getAllByTestId('line')[0]).getByText(
+ within(screen.getAllByTestId('line')[0]!).getByText(
'sentry/controllers/welcome_controller.rb'
)
).toBeInTheDocument();
@@ -896,7 +896,7 @@ describe('Threads', function () {
};
const props: React.ComponentProps = {
- data: event.entries[1].data as React.ComponentProps['data'],
+ data: event.entries[1]!.data as React.ComponentProps['data'],
event,
groupingCurrentLevel: 0,
projectSlug: project.slug,
@@ -915,9 +915,9 @@ describe('Threads', function () {
expect(screen.getByRole('radio', {name: 'Full Stack Trace'})).not.toBeChecked();
expect(screen.getByRole('button', {name: 'Options'})).toBeInTheDocument();
- expect(screen.queryByText('Threads')).toBeInTheDocument();
- expect(screen.queryByText('Thread State')).toBeInTheDocument();
- expect(screen.queryByText('Thread Tags')).toBeInTheDocument();
+ expect(screen.getByText('Threads')).toBeInTheDocument();
+ expect(screen.getByText('Thread State')).toBeInTheDocument();
+ expect(screen.getByText('Thread Tags')).toBeInTheDocument();
// Stack Trace
expect(screen.getByRole('heading', {name: 'EXC_BAD_ACCESS'})).toBeInTheDocument();
@@ -1013,7 +1013,7 @@ describe('Threads', function () {
it('maps android vm states to java vm states', async function () {
const newEvent = {...event};
- const threadsEntry = newEvent.entries[1].data as React.ComponentProps<
+ const threadsEntry = newEvent.entries[1]!.data as React.ComponentProps<
typeof Threads
>['data'];
const thread = {
@@ -1089,7 +1089,7 @@ describe('Threads', function () {
render( , {organization});
expect(
- within(screen.getAllByTestId('stack-trace-frame')[0]).getByText(
+ within(screen.getAllByTestId('stack-trace-frame')[0]!).getByText(
'-[SentryClient crash]'
)
).toBeInTheDocument();
@@ -1115,7 +1115,7 @@ describe('Threads', function () {
// Last frame is the first on the list
expect(
- within(screen.getAllByTestId('stack-trace-frame')[0]).getByText('UIKit')
+ within(screen.getAllByTestId('stack-trace-frame')[0]!).getByText('UIKit')
).toBeInTheDocument();
// Switch back to recent first
@@ -1124,7 +1124,7 @@ describe('Threads', function () {
// First frame is the first on the list
expect(
- within(screen.getAllByTestId('stack-trace-frame')[0]).getByText(
+ within(screen.getAllByTestId('stack-trace-frame')[0]!).getByText(
'-[SentryClient crash]'
)
).toBeInTheDocument();
@@ -1159,7 +1159,7 @@ describe('Threads', function () {
// Function name is not verbose
expect(
- within(screen.getAllByTestId('stack-trace-frame')[1]).getByText(
+ within(screen.getAllByTestId('stack-trace-frame')[1]!).getByText(
'ViewController.causeCrash'
)
).toBeInTheDocument();
@@ -1169,14 +1169,14 @@ describe('Threads', function () {
// Function name is now verbose
expect(
- within(screen.getAllByTestId('stack-trace-frame')[1]).getByText(
+ within(screen.getAllByTestId('stack-trace-frame')[1]!).getByText(
'ViewController.causeCrash(Any) -> ()'
)
).toBeInTheDocument();
// Address is not absolute
expect(
- within(screen.getAllByTestId('stack-trace-frame')[1]).getByText('+0x085ac')
+ within(screen.getAllByTestId('stack-trace-frame')[1]!).getByText('+0x085ac')
).toBeInTheDocument();
// Click on absolute file paths option
@@ -1184,7 +1184,7 @@ describe('Threads', function () {
// Address is now absolute
expect(
- within(screen.getAllByTestId('stack-trace-frame')[1]).getByText('0x10008c5ac')
+ within(screen.getAllByTestId('stack-trace-frame')[1]!).getByText('0x10008c5ac')
).toBeInTheDocument();
MockApiClient.addMockResponse({
@@ -1215,7 +1215,7 @@ describe('Threads', function () {
it('uses thread label in selector if name not available', async function () {
const newEvent = {...event};
- const threadsEntry = newEvent.entries[1].data as React.ComponentProps<
+ const threadsEntry = newEvent.entries[1]!.data as React.ComponentProps<
typeof Threads
>['data'];
const thread = {
diff --git a/static/app/components/events/interfaces/threads/threadSelector/getThreadException.tsx b/static/app/components/events/interfaces/threads/threadSelector/getThreadException.tsx
index 23e490792c9033..c113527cda8533 100644
--- a/static/app/components/events/interfaces/threads/threadSelector/getThreadException.tsx
+++ b/static/app/components/events/interfaces/threads/threadSelector/getThreadException.tsx
@@ -6,12 +6,12 @@ function getException(
exceptionDataValues: ExceptionValue[],
thread: Thread
) {
- if (exceptionDataValues.length === 1 && !exceptionDataValues[0].stacktrace) {
+ if (exceptionDataValues.length === 1 && !exceptionDataValues[0]!.stacktrace) {
return {
...exceptionData,
values: [
{
- ...exceptionDataValues[0],
+ ...exceptionDataValues[0]!,
stacktrace: thread.stacktrace,
rawStacktrace: thread.rawStacktrace,
},
diff --git a/static/app/components/events/interfaces/utils.spec.tsx b/static/app/components/events/interfaces/utils.spec.tsx
index d778f599737766..e9d0d73e81c349 100644
--- a/static/app/components/events/interfaces/utils.spec.tsx
+++ b/static/app/components/events/interfaces/utils.spec.tsx
@@ -89,7 +89,7 @@ describe('components/interfaces/utils', function () {
query: [['foo', 'bar']],
method: 'GET',
})
- ).toEqual('curl \\\n "http://example.com/foo?foo=bar"');
+ ).toBe('curl \\\n "http://example.com/foo?foo=bar"');
// Do not add `data` if `data` is empty object
expect(
@@ -105,7 +105,7 @@ describe('components/interfaces/utils', function () {
data: {},
method: 'GET',
})
- ).toEqual('curl \\\n "http://example.com/foo"');
+ ).toBe('curl \\\n "http://example.com/foo"');
// Filter out undefined headers
expect(
@@ -284,16 +284,16 @@ describe('components/interfaces/utils', function () {
it('should preserve unfiltered values', function () {
const result = userContextToActor(rawData);
expect(result).toHaveProperty('id');
- expect(result.id).toEqual('26');
+ expect(result.id).toBe('26');
expect(result).toHaveProperty('username');
- expect(result.username).toEqual('maiseythedog');
+ expect(result.username).toBe('maiseythedog');
});
});
describe('stringifyQueryList()', function () {
it('should return query if it is a string', function () {
const query = stringifyQueryList('query');
- expect(query).toEqual('query');
+ expect(query).toBe('query');
});
it('should parse query tuples', function () {
const query = stringifyQueryList([
@@ -302,9 +302,7 @@ describe('components/interfaces/utils', function () {
['field', 'total.time'],
['numBuckets', '100'],
]);
- expect(query).toEqual(
- 'field=ops.http&field=ops.db&field=total.time&numBuckets=100'
- );
+ expect(query).toBe('field=ops.http&field=ops.db&field=total.time&numBuckets=100');
});
});
@@ -332,7 +330,7 @@ describe('components/interfaces/utils', function () {
],
})
);
- expect(thread?.name).toEqual('puma 002');
+ expect(thread?.name).toBe('puma 002');
});
});
@@ -361,7 +359,7 @@ describe('components/interfaces/utils', function () {
}),
13920
);
- expect(thread?.name).toEqual('puma 002');
+ expect(thread?.name).toBe('puma 002');
});
});
});
diff --git a/static/app/components/events/interfaces/utils.tsx b/static/app/components/events/interfaces/utils.tsx
index 5020068be07e2b..62b175a4c10aa3 100644
--- a/static/app/components/events/interfaces/utils.tsx
+++ b/static/app/components/events/interfaces/utils.tsx
@@ -89,7 +89,7 @@ export function getHiddenFrameIndices({
const index = parseInt(indexString, 10);
const indicesToBeAdded: number[] = [];
let i = 1;
- let numHidden = frameCountMap[index];
+ let numHidden = frameCountMap[index]!;
while (numHidden > 0) {
if (!repeatedIndeces.includes(index - i)) {
indicesToBeAdded.push(index - i);
@@ -305,7 +305,7 @@ export function parseAssembly(assembly: string | null) {
}
for (let i = 1; i < pieces.length; i++) {
- const [key, value] = pieces[i].trim().split('=');
+ const [key, value] = pieces[i]!.trim().split('=');
// eslint-disable-next-line default-case
switch (key) {
diff --git a/static/app/components/events/opsBreakdown.tsx b/static/app/components/events/opsBreakdown.tsx
index d4fd56bc1e9cd8..e489c0edfa9ba1 100644
--- a/static/app/components/events/opsBreakdown.tsx
+++ b/static/app/components/events/opsBreakdown.tsx
@@ -373,7 +373,7 @@ function mergeInterval(intervals: TimeWindowSpan[]): TimeWindowSpan[] {
continue;
}
- const lastInterval = merged[merged.length - 1];
+ const lastInterval = merged[merged.length - 1]!;
const lastIntervalEnd = lastInterval[1];
const [currentIntervalStart, currentIntervalEnd] = currentInterval;
diff --git a/static/app/components/events/searchBar.tsx b/static/app/components/events/searchBar.tsx
index 7c5113644f29db..dbfac9253a5b46 100644
--- a/static/app/components/events/searchBar.tsx
+++ b/static/app/components/events/searchBar.tsx
@@ -3,9 +3,9 @@ import memoize from 'lodash/memoize';
import omit from 'lodash/omit';
import {fetchSpanFieldValues, fetchTagValues} from 'sentry/actionCreators/tags';
+import SmartSearchBar from 'sentry/components/deprecatedSmartSearchBar';
import type {SearchConfig} from 'sentry/components/searchSyntax/parser';
import {defaultConfig} from 'sentry/components/searchSyntax/parser';
-import SmartSearchBar from 'sentry/components/smartSearchBar';
import type {TagCollection} from 'sentry/types/group';
import {SavedSearchType} from 'sentry/types/group';
import type {Organization} from 'sentry/types/organization';
@@ -98,19 +98,19 @@ const getSearchConfigFromCustomPerformanceMetrics = (
numericKeys: [...defaultConfig.numericKeys],
};
Object.keys(customPerformanceMetrics).forEach(metricName => {
- const {fieldType} = customPerformanceMetrics[metricName];
+ const {fieldType} = customPerformanceMetrics[metricName]!;
switch (fieldType) {
case 'size':
- searchConfigMap.sizeKeys.push(metricName);
+ searchConfigMap.sizeKeys!.push(metricName);
break;
case 'duration':
- searchConfigMap.durationKeys.push(metricName);
+ searchConfigMap.durationKeys!.push(metricName);
break;
case 'percentage':
- searchConfigMap.percentageKeys.push(metricName);
+ searchConfigMap.percentageKeys!.push(metricName);
break;
default:
- searchConfigMap.numericKeys.push(metricName);
+ searchConfigMap.numericKeys!.push(metricName);
}
});
const searchConfig = {
@@ -222,7 +222,7 @@ function SearchBar(props: SearchBarProps) {
projectIds: projectIdStrings,
endpointParams,
// allows searching for tags on transactions as well
- includeTransactions: includeTransactions,
+ includeTransactions,
// allows searching for tags on sessions as well
includeSessions: includeSessionTagsValues,
dataset: dataset ? DiscoverDatasetsToDatasetMap[dataset] : undefined,
diff --git a/static/app/components/events/viewHierarchy/index.spec.tsx b/static/app/components/events/viewHierarchy/index.spec.tsx
index ae97f3986a3c05..a34df368f49bea 100644
--- a/static/app/components/events/viewHierarchy/index.spec.tsx
+++ b/static/app/components/events/viewHierarchy/index.spec.tsx
@@ -2,7 +2,7 @@ import {ProjectFixture} from 'sentry-fixture/project';
import {render, screen, userEvent, within} from 'sentry-test/reactTestingLibrary';
-import {ViewHierarchy} from '.';
+import {ViewHierarchy, type ViewHierarchyData} from './index';
// Mocks for useVirtualizedTree hook
class ResizeObserver {
@@ -47,8 +47,8 @@ const DEFAULT_MOCK_DATA = {
};
describe('View Hierarchy', function () {
- let MOCK_DATA;
- let project;
+ let MOCK_DATA!: ViewHierarchyData;
+ let project!: ReturnType<typeof ProjectFixture>;
beforeEach(() => {
MOCK_DATA = DEFAULT_MOCK_DATA;
project = ProjectFixture();
@@ -78,7 +78,7 @@ describe('View Hierarchy', function () {
it('can expand and collapse by clicking the icon', async function () {
render( );
- expect(screen.queryByText('Text')).toBeInTheDocument();
+ expect(screen.getByText('Text')).toBeInTheDocument();
await userEvent.click(
within(screen.getByLabelText('Nested Container - nested')).getByRole('button', {
@@ -90,13 +90,13 @@ describe('View Hierarchy', function () {
await userEvent.click(screen.getByRole('button', {name: 'Expand'}));
- expect(screen.queryByText('Text')).toBeInTheDocument();
+ expect(screen.getByText('Text')).toBeInTheDocument();
});
it('can navigate with keyboard shortcuts after a selection', async function () {
render( );
- await userEvent.click(screen.getAllByText('Container - test_identifier')[0]);
+ await userEvent.click(screen.getAllByText('Container - test_identifier')[0]!);
await userEvent.keyboard('{ArrowDown}');
@@ -107,7 +107,7 @@ describe('View Hierarchy', function () {
it('can expand/collapse with the keyboard', async function () {
render( );
- await userEvent.click(screen.getAllByText('Nested Container - nested')[0]);
+ await userEvent.click(screen.getAllByText('Nested Container - nested')[0]!);
await userEvent.keyboard('{Enter}');
diff --git a/static/app/components/events/viewHierarchy/utils.spec.tsx b/static/app/components/events/viewHierarchy/utils.spec.tsx
index 36dd4e8c483e3f..72d07e1e5374ee 100644
--- a/static/app/components/events/viewHierarchy/utils.spec.tsx
+++ b/static/app/components/events/viewHierarchy/utils.spec.tsx
@@ -91,32 +91,32 @@ describe('View Hierarchy Utils', function () {
it('zooms in on small content', function () {
maxCoordinateDimensions = {height: 2, width: 2};
actual = calculateScale(bounds, maxCoordinateDimensions, border);
- expect(actual).toEqual(5);
+ expect(actual).toBe(5);
});
it('shrinks larger content', function () {
maxCoordinateDimensions = {height: 20, width: 20};
actual = calculateScale(bounds, maxCoordinateDimensions, border);
- expect(actual).toEqual(0.5);
+ expect(actual).toBe(0.5);
});
it('works with an irregular size where height is the dominant factor', function () {
maxCoordinateDimensions = {height: 20, width: 2};
actual = calculateScale(bounds, maxCoordinateDimensions, border);
- expect(actual).toEqual(0.5);
+ expect(actual).toBe(0.5);
});
it('works with an irregular size where width is the dominant factor', function () {
maxCoordinateDimensions = {height: 10, width: 32};
actual = calculateScale(bounds, maxCoordinateDimensions, border);
- expect(actual).toEqual(0.3125);
+ expect(actual).toBe(0.3125);
});
it('factors in the border', function () {
maxCoordinateDimensions = {height: 20, width: 20};
border = {x: 2, y: 2};
actual = calculateScale(bounds, maxCoordinateDimensions, border);
- expect(actual).toEqual(0.4);
+ expect(actual).toBe(0.4);
});
});
diff --git a/static/app/components/events/viewHierarchy/utils.tsx b/static/app/components/events/viewHierarchy/utils.tsx
index 6de7bd4aecdf3a..92a1017aafec52 100644
--- a/static/app/components/events/viewHierarchy/utils.tsx
+++ b/static/app/components/events/viewHierarchy/utils.tsx
@@ -21,7 +21,7 @@ export function useResizeCanvasObserver(canvases: (HTMLCanvasElement | null)[]):
const observer = watchForResize(canvases as HTMLCanvasElement[], entries => {
const contentRect =
- entries[0].contentRect ?? entries[0].target.getBoundingClientRect();
+ entries[0]!.contentRect ?? entries[0]!.target.getBoundingClientRect();
setCanvasBounds(
new Rect(
@@ -52,7 +52,7 @@ export function getHierarchyDimensions(
const nodes: ViewNode[] = [];
const queue: [Rect | null, ViewHierarchyWindow][] = [];
for (let i = hierarchies.length - 1; i >= 0; i--) {
- queue.push([null, hierarchies[i]]);
+ queue.push([null, hierarchies[i]!]);
}
let maxWidth = Number.MIN_SAFE_INTEGER;
@@ -77,7 +77,7 @@ export function getHierarchyDimensions(
// output nodes should have early children before later children
// i.e. we need to pop() off early children before ones that come after
for (let i = child.children.length - 1; i >= 0; i--) {
- queue.push([node.rect, child.children[i]]);
+ queue.push([node.rect, child.children[i]!]);
}
}
@@ -114,8 +114,8 @@ export function getDeepestNodeAtPoint(
vec2.scale(point, point, scale);
const transformedPoint = vec2.transformMat3(vec2.create(), point, inverseMatrix);
for (let i = 0; i < nodes.length; i++) {
- const node = nodes[i];
- if (node.rect.contains(transformedPoint)) {
+ const node = nodes[i]!;
+ if (node!.rect.contains(transformedPoint)) {
clickedNode = node;
}
}
diff --git a/static/app/components/events/viewHierarchy/wireframe.tsx b/static/app/components/events/viewHierarchy/wireframe.tsx
index 9877dad5a743b5..26d32e686837df 100644
--- a/static/app/components/events/viewHierarchy/wireframe.tsx
+++ b/static/app/components/events/viewHierarchy/wireframe.tsx
@@ -144,16 +144,16 @@ function Wireframe({hierarchy, selectedNode, onNodeSelect, project}: WireframePr
for (let i = 0; i < hierarchyData.nodes.length; i++) {
canvas.strokeRect(
- hierarchyData.nodes[i].rect.x,
- hierarchyData.nodes[i].rect.y,
- hierarchyData.nodes[i].rect.width,
- hierarchyData.nodes[i].rect.height
+ hierarchyData.nodes[i]!.rect.x,
+ hierarchyData.nodes[i]!.rect.y,
+ hierarchyData.nodes[i]!.rect.width,
+ hierarchyData.nodes[i]!.rect.height
);
canvas.fillRect(
- hierarchyData.nodes[i].rect.x,
- hierarchyData.nodes[i].rect.y,
- hierarchyData.nodes[i].rect.width,
- hierarchyData.nodes[i].rect.height
+ hierarchyData.nodes[i]!.rect.x,
+ hierarchyData.nodes[i]!.rect.y,
+ hierarchyData.nodes[i]!.rect.width,
+ hierarchyData.nodes[i]!.rect.height
);
}
}
diff --git a/static/app/components/featureFeedback/feedbackModal.tsx b/static/app/components/featureFeedback/feedbackModal.tsx
index 6e42a489b38c03..79767ae2bb9cd7 100644
--- a/static/app/components/featureFeedback/feedbackModal.tsx
+++ b/static/app/components/featureFeedback/feedbackModal.tsx
@@ -1,6 +1,7 @@
import {Fragment, useCallback, useMemo, useState} from 'react';
import {css, useTheme} from '@emotion/react';
import styled from '@emotion/styled';
+import type {Event} from '@sentry/core';
import {
BrowserClient,
captureFeedback,
@@ -8,7 +9,6 @@ import {
getDefaultIntegrations,
makeFetchTransport,
} from '@sentry/react';
-import type {Event} from '@sentry/types';
import cloneDeep from 'lodash/cloneDeep';
import {addSuccessMessage} from 'sentry/actionCreators/indicator';
diff --git a/static/app/components/feedback/feedbackItem/feedbackActions.tsx b/static/app/components/feedback/feedbackItem/feedbackActions.tsx
index ccd15fddf45972..e3dae0e97fbc6f 100644
--- a/static/app/components/feedback/feedbackItem/feedbackActions.tsx
+++ b/static/app/components/feedback/feedbackItem/feedbackActions.tsx
@@ -30,13 +30,16 @@ export default function FeedbackActions({
size,
style,
}: Props) {
+ if (!eventData) {
+ return null;
+ }
+
return (
-
+
diff --git a/static/app/components/feedback/feedbackItem/feedbackAssignedTo.tsx b/static/app/components/feedback/feedbackItem/feedbackAssignedTo.tsx
index d896c90b390333..e9b6c7f546557f 100644
--- a/static/app/components/feedback/feedbackItem/feedbackAssignedTo.tsx
+++ b/static/app/components/feedback/feedbackItem/feedbackAssignedTo.tsx
@@ -1,16 +1,14 @@
import {useEffect} from 'react';
-import styled from '@emotion/styled';
import {fetchOrgMembers} from 'sentry/actionCreators/members';
-import ActorAvatar from 'sentry/components/avatar/actorAvatar';
-import {Button} from 'sentry/components/button';
-import {DeprecatedAssigneeSelectorDropdown} from 'sentry/components/deprecatedAssigneeSelectorDropdown';
import useMutateFeedback from 'sentry/components/feedback/useMutateFeedback';
import type {EventOwners} from 'sentry/components/group/assignedTo';
-import {getAssignedToDisplayName, getOwnerList} from 'sentry/components/group/assignedTo';
-import {IconChevron, IconUser} from 'sentry/icons';
-import {t} from 'sentry/locale';
-import {space} from 'sentry/styles/space';
+import {getOwnerList} from 'sentry/components/group/assignedTo';
+import {
+ AssigneeSelector,
+ useHandleAssigneeChange,
+} from 'sentry/components/group/assigneeSelector';
+import type {Actor} from 'sentry/types/core';
import type {Group} from 'sentry/types/group';
import type {FeedbackEvent} from 'sentry/utils/feedback/types';
import {useApiQuery} from 'sentry/utils/queryClient';
@@ -18,16 +16,11 @@ import useApi from 'sentry/utils/useApi';
import useOrganization from 'sentry/utils/useOrganization';
interface Props {
- feedbackEvent: FeedbackEvent | undefined;
+ feedbackEvent: FeedbackEvent;
feedbackIssue: Group;
- showActorName: boolean;
}
-export default function FeedbackAssignedTo({
- feedbackIssue,
- feedbackEvent,
- showActorName,
-}: Props) {
+export default function FeedbackAssignedTo({feedbackIssue, feedbackEvent}: Props) {
const organization = useOrganization();
const api = useApi();
const project = feedbackIssue.project;
@@ -45,6 +38,10 @@ export default function FeedbackAssignedTo({
enabled: Boolean(feedbackEvent),
}
);
+ const {handleAssigneeChange, assigneeLoading} = useHandleAssigneeChange({
+ organization,
+ group: feedbackIssue,
+ });
const {assign} = useMutateFeedback({
feedbackIds: [feedbackIssue.id],
@@ -54,61 +51,15 @@ export default function FeedbackAssignedTo({
const owners = getOwnerList([], eventOwners, feedbackIssue.assignedTo);
- // A new `key` will make the component re-render when showActorName changes
- const key = showActorName ? 'showActor' : 'hideActor';
-
return (
- {
- assign(feedbackIssue.assignedTo);
- }}
- onClear={() => {
- assign(null);
- }}
- owners={owners}
+
- {({isOpen, getActorProps}) => (
-
-
- {!feedbackIssue.assignedTo ? (
-
- ) : (
-
- )}
- {showActorName ? (
-
- {getAssignedToDisplayName(feedbackIssue) ?? t('Unassigned')}
-
- ) : null}
-
-
-
- )}
-
+ owners={owners}
+ assigneeLoading={assigneeLoading}
+ handleAssigneeChange={e => {
+ assign(e?.assignee as Actor);
+ handleAssigneeChange(e);
+ }}
+ />
);
}
-
-const ActorWrapper = styled('div')`
- display: flex;
- align-items: center;
- gap: ${space(0.5)};
- max-width: 150px;
- line-height: 1;
-`;
-
-const ActorName = styled('div')`
- line-height: 1.2;
- ${p => p.theme.overflowEllipsis}
- font-size: ${p => p.theme.fontSizeSmall};
-`;
diff --git a/static/app/components/feedback/feedbackItem/feedbackItemHeader.tsx b/static/app/components/feedback/feedbackItem/feedbackItemHeader.tsx
index 179418b9707e88..10a60cfbc44aa2 100644
--- a/static/app/components/feedback/feedbackItem/feedbackItemHeader.tsx
+++ b/static/app/components/feedback/feedbackItem/feedbackItemHeader.tsx
@@ -49,6 +49,7 @@ export default function FeedbackItemHeader({eventData, feedbackItem}: Props) {
eventData={eventData}
feedbackItem={feedbackItem}
size={dimensionsToSize(dimensions)}
+ style={{lineHeight: 1}}
/>
diff --git a/static/app/components/feedback/feedbackItem/feedbackItemUsername.spec.tsx b/static/app/components/feedback/feedbackItem/feedbackItemUsername.spec.tsx
index 48bd65fecddca3..4fa250b847f717 100644
--- a/static/app/components/feedback/feedbackItem/feedbackItemUsername.spec.tsx
+++ b/static/app/components/feedback/feedbackItem/feedbackItemUsername.spec.tsx
@@ -1,10 +1,18 @@
import {FeedbackIssueFixture} from 'sentry-fixture/feedbackIssue';
-import {render, screen} from 'sentry-test/reactTestingLibrary';
+import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary';
import FeedbackItemUsername from 'sentry/components/feedback/feedbackItem/feedbackItemUsername';
describe('FeedbackItemUsername', () => {
+ beforeEach(() => {
+ Object.assign(navigator, {
+ clipboard: {
+ writeText: jest.fn().mockResolvedValue(''),
+ },
+ });
+ });
+
it('should fallback to "Anonymous User" when no name/contact_email exist', () => {
const issue = FeedbackIssueFixture({
metadata: {
@@ -88,4 +96,24 @@ describe('FeedbackItemUsername', () => {
expect.stringContaining('mailto:foo@bar.com')
);
});
+
+ it('should copy text and select it on click', async () => {
+ const issue = FeedbackIssueFixture({
+ metadata: {
+ name: 'Foo Bar',
+ contact_email: 'foo@bar.com',
+ },
+ });
+ render( );
+
+ const username = screen.getByText('Foo Bar');
+
+ await userEvent.click(username);
+
+ await waitFor(() => {
+ expect(window.getSelection()?.toString()).toBe('Foo Bar•foo@bar.com');
+ });
+
+ expect(navigator.clipboard.writeText).toHaveBeenCalledWith('Foo Bar <foo@bar.com>');
+ });
});
diff --git a/static/app/components/feedback/feedbackItem/feedbackItemUsername.tsx b/static/app/components/feedback/feedbackItem/feedbackItemUsername.tsx
index 6f4abca146d595..c65bbf02bcf47c 100644
--- a/static/app/components/feedback/feedbackItem/feedbackItemUsername.tsx
+++ b/static/app/components/feedback/feedbackItem/feedbackItemUsername.tsx
@@ -1,5 +1,4 @@
-import {type CSSProperties, Fragment, useCallback, useRef} from 'react';
-import {findDOMNode} from 'react-dom';
+import {type CSSProperties, Fragment, useCallback, useId} from 'react';
import styled from '@emotion/styled';
import {LinkButton} from 'sentry/components/button';
@@ -29,22 +28,16 @@ export default function FeedbackItemUsername({className, feedbackIssue, style}:
const user = name && email && !isSameNameAndEmail ? `${name} <${email}>` : nameOrEmail;
- const userNodeRef = useRef(null);
+ const userNodeId = useId();
const handleSelectText = useCallback(() => {
- if (!userNodeRef.current) {
- return;
- }
-
- // We use findDOMNode here because `this.userNodeRef` is not a dom node,
- // it's a ref to AutoSelectText
- const node = findDOMNode(userNodeRef.current); // eslint-disable-line react/no-find-dom-node
- if (!node || !(node instanceof HTMLElement)) {
+ const node = document.getElementById(userNodeId);
+ if (!node) {
return;
}
selectText(node);
- }, []);
+ }, [userNodeId]);
const {onClick: handleCopyToClipboard} = useCopyToClipboard({
text: user ?? '',
@@ -65,6 +58,7 @@ export default function FeedbackItemUsername({className, feedbackIssue, style}:
{isSameNameAndEmail ? (
{name ?? email}
diff --git a/static/app/components/feedback/feedbackItem/replayInlineCTAPanel.tsx b/static/app/components/feedback/feedbackItem/replayInlineCTAPanel.tsx
index 1119bde378f097..02af6109d5efa3 100644
--- a/static/app/components/feedback/feedbackItem/replayInlineCTAPanel.tsx
+++ b/static/app/components/feedback/feedbackItem/replayInlineCTAPanel.tsx
@@ -17,10 +17,11 @@ export default function ReplayInlineCTAPanel() {
button={
activateSidebar()}
>
{t('Set Up Now')}
diff --git a/static/app/components/feedback/feedbackOnboarding/sidebar.tsx b/static/app/components/feedback/feedbackOnboarding/sidebar.tsx
index 921adbf3424630..caf622399b91d3 100644
--- a/static/app/components/feedback/feedbackOnboarding/sidebar.tsx
+++ b/static/app/components/feedback/feedbackOnboarding/sidebar.tsx
@@ -163,7 +163,7 @@ function OnboardingContent({currentProject}: {currentProject: Project}) {
value: PlatformKey;
label?: ReactNode;
textValue?: string;
- }>(jsFrameworkSelectOptions[0]);
+ }>(jsFrameworkSelectOptions[0]!);
const defaultTab = 'npm';
const location = useLocation();
@@ -195,7 +195,7 @@ function OnboardingContent({currentProject}: {currentProject: Project}) {
const jsFrameworkPlatform =
replayJsFrameworkOptions().find(p => p.id === jsFramework.value) ??
- replayJsFrameworkOptions()[0];
+ replayJsFrameworkOptions()[0]!;
const {
isLoading,
@@ -261,7 +261,6 @@ function OnboardingContent({currentProject}: {currentProject: Project}) {
]}
value={setupMode()}
onChange={setSetupMode}
- disabledChoices={[['jsLoader', t('Coming soon!')]]}
tooltipPosition={'top-start'}
/>
) : (
@@ -333,8 +332,7 @@ function OnboardingContent({currentProject}: {currentProject: Project}) {
) {
return 'feedbackOnboardingNpm';
}
- // TODO: update this when we add feedback to the loader
- return 'replayOnboardingJsLoader';
+ return 'feedbackOnboardingJsLoader';
}
return (
diff --git a/static/app/components/feedback/feedbackSearch.tsx b/static/app/components/feedback/feedbackSearch.tsx
index 1e1fcc63a95783..5a39973f83e02d 100644
--- a/static/app/components/feedback/feedbackSearch.tsx
+++ b/static/app/components/feedback/feedbackSearch.tsx
@@ -66,7 +66,7 @@ function getFeedbackFilterKeys(supportedTags: TagCollection) {
.map(key => [
key,
{
- ...supportedTags[key],
+ ...supportedTags[key]!,
kind: getFeedbackFieldDefinition(key)?.kind ?? FieldKind.TAG,
},
])
@@ -79,7 +79,7 @@ function getFeedbackFilterKeys(supportedTags: TagCollection) {
// To guarantee ordering, we need to implement filterKeySections.
const keys = Object.keys(allTags);
keys.sort();
- return Object.fromEntries(keys.map(key => [key, allTags[key]]));
+ return Object.fromEntries(keys.map(key => [key, allTags[key]!]));
}
const getFilterKeySections = (tags: TagCollection): FilterKeySection[] => {
@@ -132,9 +132,9 @@ export default function FeedbackSearch() {
useCache: true,
enabled: true,
keepPreviousData: false,
- start: start,
- end: end,
- statsPeriod: statsPeriod,
+ start,
+ end,
+ statsPeriod,
},
{}
);
@@ -164,9 +164,9 @@ export default function FeedbackSearch() {
}
const endpointParams = {
- start: start,
- end: end,
- statsPeriod: statsPeriod,
+ start,
+ end,
+ statsPeriod,
};
return fetchTagValues({
diff --git a/static/app/components/feedback/list/issueTrackingSignals.tsx b/static/app/components/feedback/list/issueTrackingSignals.tsx
index 00ca8a6e8bc7bf..a733eddc484271 100644
--- a/static/app/components/feedback/list/issueTrackingSignals.tsx
+++ b/static/app/components/feedback/list/issueTrackingSignals.tsx
@@ -69,7 +69,7 @@ export default function IssueTrackingSignals({group}: Props) {
);
}
- const issue = linkedIssues[0];
+ const issue = linkedIssues[0]!;
const {name, icon} = {
'plugin-issue': getPluginNames,
'plugin-actions': getPluginNames,
diff --git a/static/app/components/feedback/useDeleteFeedback.tsx b/static/app/components/feedback/useDeleteFeedback.tsx
index 585a43809d002c..27ab5aa44b18d3 100644
--- a/static/app/components/feedback/useDeleteFeedback.tsx
+++ b/static/app/components/feedback/useDeleteFeedback.tsx
@@ -26,7 +26,7 @@ export const useDeleteFeedback = (feedbackIds, projectId) => {
api,
{
orgId: organization.slug,
- projectId: projectId,
+ projectId,
itemIds: feedbackIds,
},
{
diff --git a/static/app/components/feedback/useMutateFeedback.tsx b/static/app/components/feedback/useMutateFeedback.tsx
index bc41d26bd1a44b..bf4821a670b3d7 100644
--- a/static/app/components/feedback/useMutateFeedback.tsx
+++ b/static/app/components/feedback/useMutateFeedback.tsx
@@ -10,7 +10,10 @@ import {fetchMutation, useMutation} from 'sentry/utils/queryClient';
import useApi from 'sentry/utils/useApi';
type TFeedbackIds = 'all' | string[];
-type TPayload = {hasSeen: boolean} | {status: GroupStatus} | {assignedTo: Actor | null};
+type TPayload =
+ | {hasSeen: boolean}
+ | {status: GroupStatus}
+ | {assignedTo: Actor | undefined};
type TData = unknown;
type TError = unknown;
type TVariables = [TFeedbackIds, TPayload];
@@ -78,7 +81,7 @@ export default function useMutateFeedback({
const assign = useCallback(
(
- assignedTo: Actor | null,
+ assignedTo: Actor | undefined,
options?: MutateOptions
) => {
mutation.mutate([feedbackIds, {assignedTo}], options);
diff --git a/static/app/components/feedback/widget/types.ts b/static/app/components/feedback/widget/types.ts
index 8c3ef9b01c8292..a53a611c3a877e 100644
--- a/static/app/components/feedback/widget/types.ts
+++ b/static/app/components/feedback/widget/types.ts
@@ -1,4 +1,4 @@
-import type {Event} from '@sentry/types';
+import type {Event} from '@sentry/core';
/**
* NOTE: These types are still considered Beta and subject to change.
diff --git a/static/app/components/forms/controls/selectControl.tsx b/static/app/components/forms/controls/selectControl.tsx
index b5242fdd6e90e9..081032fa8d9be1 100644
--- a/static/app/components/forms/controls/selectControl.tsx
+++ b/static/app/components/forms/controls/selectControl.tsx
@@ -41,7 +41,7 @@ function isGroupedOptions(
if (!maybe || maybe.length === 0) {
return false;
}
- return (maybe as GroupedOptionsType)[0].options !== undefined;
+ return (maybe as GroupedOptionsType)[0]!.options !== undefined;
}
function ClearIndicator(
diff --git a/static/app/components/forms/fields/choiceMapperField.tsx b/static/app/components/forms/fields/choiceMapperField.tsx
index bc217c728d0753..97af15f23c04c2 100644
--- a/static/app/components/forms/fields/choiceMapperField.tsx
+++ b/static/app/components/forms/fields/choiceMapperField.tsx
@@ -148,7 +148,6 @@ export default class ChoiceMapperField extends Component
};
const removeRow = (itemKey: string) => {
- // eslint-disable-next-line no-unused-vars
saveChanges(
Object.fromEntries(Object.entries(value).filter(([key, _]) => key !== itemKey))
);
@@ -220,7 +219,7 @@ export default class ChoiceMapperField extends Component
{
]}
/>
);
- await userEvent.click(screen.getAllByLabelText('Delete')[0]);
+ await userEvent.click(screen.getAllByLabelText('Delete')[0]!);
expect(defaultProps.onBlur).toHaveBeenCalledWith([[24, 1]], []);
expect(defaultProps.onChange).toHaveBeenCalledWith([[24, 1]], []);
diff --git a/static/app/components/forms/fields/projectMapperField.tsx b/static/app/components/forms/fields/projectMapperField.tsx
index 92f79bebb25c17..33096362a79ca2 100644
--- a/static/app/components/forms/fields/projectMapperField.tsx
+++ b/static/app/components/forms/fields/projectMapperField.tsx
@@ -67,7 +67,7 @@ export class RenderField extends Component {
if (newProjects.length === 1) {
this.setState({
- selectedSentryProjectId: newProjects[0],
+ selectedSentryProjectId: newProjects[0]!,
});
}
}
diff --git a/static/app/components/forms/fields/selectField.tsx b/static/app/components/forms/fields/selectField.tsx
index 23510bfef01414..bf2d1d461659c8 100644
--- a/static/app/components/forms/fields/selectField.tsx
+++ b/static/app/components/forms/fields/selectField.tsx
@@ -120,7 +120,8 @@ export default class SelectField> extends Co
placeholder,
...props
}) => {
- const showTempNoneOption = !multiple && !props.value;
+ const showTempNoneOption =
+ !multiple && (props.value === undefined || props.value === null);
return (
diff --git a/static/app/components/forms/fields/sentryMemberTeamSelectorField.spec.tsx b/static/app/components/forms/fields/sentryMemberTeamSelectorField.spec.tsx
index 75cd94a07ee326..a68cda0ca8d1c7 100644
--- a/static/app/components/forms/fields/sentryMemberTeamSelectorField.spec.tsx
+++ b/static/app/components/forms/fields/sentryMemberTeamSelectorField.spec.tsx
@@ -50,7 +50,7 @@ describe('SentryMemberTeamSelectorField', () => {
await selectEvent.select(
screen.getByRole('textbox', {name: 'Select Owner'}),
- `#${mockTeams[0].slug}`
+ `#${mockTeams[0]!.slug}`
);
expect(mock).toHaveBeenCalledWith('team:1', expect.anything());
@@ -92,7 +92,7 @@ describe('SentryMemberTeamSelectorField', () => {
await selectEvent.select(
screen.getByRole('textbox', {name: 'Select Owner'}),
- mockUsers[0].name
+ mockUsers[0]!.name
);
expect(mock).toHaveBeenCalledWith('user:1', expect.anything());
@@ -114,11 +114,11 @@ describe('SentryMemberTeamSelectorField', () => {
await selectEvent.select(
screen.getByRole('textbox', {name: 'Select Owner'}),
- mockUsers[0].name
+ mockUsers[0]!.name
);
await selectEvent.select(
screen.getByRole('textbox', {name: 'Select Owner'}),
- `#${mockTeams[0].slug}`
+ `#${mockTeams[0]!.slug}`
);
expect(mock).toHaveBeenCalledWith(['user:1', 'team:1'], expect.anything());
diff --git a/static/app/components/forms/formField/index.spec.tsx b/static/app/components/forms/formField/index.spec.tsx
index 5935a53f1787d5..6c1e75f5e9cf96 100644
--- a/static/app/components/forms/formField/index.spec.tsx
+++ b/static/app/components/forms/formField/index.spec.tsx
@@ -5,7 +5,7 @@ import Form from 'sentry/components/forms/form';
import FormModel from 'sentry/components/forms/model';
describe('FormField + model', function () {
- let model;
+ let model!: FormModel;
beforeEach(function () {
model = new FormModel();
diff --git a/static/app/components/forms/jsonForm.spec.tsx b/static/app/components/forms/jsonForm.spec.tsx
index 65cf14be51582c..7dc25a8d75fd6c 100644
--- a/static/app/components/forms/jsonForm.spec.tsx
+++ b/static/app/components/forms/jsonForm.spec.tsx
@@ -6,7 +6,7 @@ import JsonForm from 'sentry/components/forms/jsonForm';
import accountDetailsFields from 'sentry/data/forms/accountDetails';
import {fields} from 'sentry/data/forms/projectGeneralSettings';
-import type {JsonFormObject} from './types';
+import type {FieldObject, JsonFormObject} from './types';
const user = UserFixture();
@@ -104,7 +104,6 @@ describe('JsonForm', function () {
});
it('missing additionalFieldProps required in "valid" prop', function () {
- // eslint-disable-next-line no-console
jest.spyOn(console, 'error').mockImplementation(jest.fn());
try {
render( );
@@ -168,19 +167,18 @@ describe('JsonForm', function () {
});
describe('fields prop', function () {
- const jsonFormFields = [fields.name, fields.platform];
+ const jsonFormFields = [fields.name, fields.platform] as FieldObject[];
it('default', function () {
render( );
});
it('missing additionalFieldProps required in "valid" prop', function () {
- // eslint-disable-next-line no-console
jest.spyOn(console, 'error').mockImplementation(jest.fn());
try {
render(
!!test.email}]}
+ fields={[{...jsonFormFields[0]!, visible: ({test}) => !!test.email}]}
/>
);
} catch (error) {
@@ -192,7 +190,7 @@ describe('JsonForm', function () {
it('should NOT hide panel, if at least one field has visible set to true - no visible prop', function () {
// slug and platform have no visible prop, that means they will be always visible
- render( );
+ render( );
expect(screen.getByText('Account Details')).toBeInTheDocument();
expect(screen.getAllByRole('textbox')).toHaveLength(2);
@@ -202,8 +200,8 @@ describe('JsonForm', function () {
// slug and platform have no visible prop, that means they will be always visible
render(
({...field, visible: true}))}
+ title={accountDetailsFields[0]!.title}
+ fields={jsonFormFields.map(field => ({...field!, visible: true}))}
/>
);
@@ -215,8 +213,8 @@ describe('JsonForm', function () {
// slug and platform have no visible prop, that means they will be always visible
render(
({...field, visible: () => true}))}
+ title={accountDetailsFields[0]!.title}
+ fields={jsonFormFields.map(field => ({...field!, visible: () => true}))}
/>
);
@@ -228,8 +226,8 @@ describe('JsonForm', function () {
// slug and platform have no visible prop, that means they will be always visible
render(
({...field, visible: false}))}
+ title={accountDetailsFields[0]!.title}
+ fields={jsonFormFields.map(field => ({...field!, visible: false}))}
/>
);
@@ -240,8 +238,8 @@ describe('JsonForm', function () {
// slug and platform have no visible prop, that means they will be always visible
render(
({...field, visible: () => false}))}
+ title={accountDetailsFields[0]!.title}
+ fields={jsonFormFields.map(field => ({...field!, visible: () => false}))}
/>
);
diff --git a/static/app/components/forms/model.tsx b/static/app/components/forms/model.tsx
index 7b4d98a2e54689..bc506b1267e37b 100644
--- a/static/app/components/forms/model.tsx
+++ b/static/app/components/forms/model.tsx
@@ -484,7 +484,7 @@ class FormModel {
}
this.snapshots.shift();
- this.fields.replace(this.snapshots[0]);
+ this.fields.replace(this.snapshots[0]!);
return true;
}
diff --git a/static/app/components/globalDrawer/components.tsx b/static/app/components/globalDrawer/components.tsx
index 97787f8aebfb9d..75068f4501558c 100644
--- a/static/app/components/globalDrawer/components.tsx
+++ b/static/app/components/globalDrawer/components.tsx
@@ -70,7 +70,7 @@ interface DrawerHeaderProps {
hideCloseButton?: boolean;
}
-export const DrawerHeader = forwardRef(function _DrawerHeader(
+export const DrawerHeader = forwardRef(function DrawerHeaderInner(
{
className,
children = null,
diff --git a/static/app/components/gridEditable/index.tsx b/static/app/components/gridEditable/index.tsx
index e28ef18e1e8f55..66411bdac8168e 100644
--- a/static/app/components/gridEditable/index.tsx
+++ b/static/app/components/gridEditable/index.tsx
@@ -178,7 +178,7 @@ class GridEditable<
clearWindowLifecycleEvents() {
Object.keys(this.resizeWindowLifecycleEvents).forEach(e => {
- this.resizeWindowLifecycleEvents[e].forEach(c => window.removeEventListener(e, c));
+ this.resizeWindowLifecycleEvents[e]!.forEach(c => window.removeEventListener(e, c));
this.resizeWindowLifecycleEvents[e] = [];
});
}
@@ -188,7 +188,7 @@ class GridEditable<
const nextColumnOrder = [...this.props.columnOrder];
nextColumnOrder[i] = {
- ...nextColumnOrder[i],
+ ...nextColumnOrder[i]!,
width: COL_WIDTH_UNDEFINED,
};
this.setGridTemplateColumns(nextColumnOrder);
@@ -196,7 +196,7 @@ class GridEditable<
const onResizeColumn = this.props.grid.onResizeColumn;
if (onResizeColumn) {
onResizeColumn(i, {
- ...nextColumnOrder[i],
+ ...nextColumnOrder[i]!,
width: COL_WIDTH_UNDEFINED,
});
}
@@ -224,10 +224,10 @@ class GridEditable<
};
window.addEventListener('mousemove', this.onResizeMouseMove);
- this.resizeWindowLifecycleEvents.mousemove.push(this.onResizeMouseMove);
+ this.resizeWindowLifecycleEvents.mousemove!.push(this.onResizeMouseMove);
window.addEventListener('mouseup', this.onResizeMouseUp);
- this.resizeWindowLifecycleEvents.mouseup.push(this.onResizeMouseUp);
+ this.resizeWindowLifecycleEvents.mouseup!.push(this.onResizeMouseUp);
};
onResizeMouseUp = (e: MouseEvent) => {
@@ -239,7 +239,7 @@ class GridEditable<
const widthChange = e.clientX - metadata.cursorX;
onResizeColumn(metadata.columnIndex, {
- ...columnOrder[metadata.columnIndex],
+ ...columnOrder[metadata.columnIndex]!,
width: metadata.columnWidth + widthChange,
});
}
@@ -267,7 +267,7 @@ class GridEditable<
const nextColumnOrder = [...this.props.columnOrder];
nextColumnOrder[metadata.columnIndex] = {
- ...nextColumnOrder[metadata.columnIndex],
+ ...nextColumnOrder[metadata.columnIndex]!,
width: Math.max(metadata.columnWidth + widthChange, 0),
};
diff --git a/static/app/components/gridEditable/sortLink.tsx b/static/app/components/gridEditable/sortLink.tsx
index 702036cf35511d..ac836929e17732 100644
--- a/static/app/components/gridEditable/sortLink.tsx
+++ b/static/app/components/gridEditable/sortLink.tsx
@@ -3,7 +3,7 @@ import type {LocationDescriptorObject} from 'history';
import Link from 'sentry/components/links/link';
import {IconArrow} from 'sentry/icons';
-import {browserHistory} from 'sentry/utils/browserHistory';
+import {useNavigate} from 'sentry/utils/useNavigate';
export type Alignments = 'left' | 'right' | undefined;
export type Directions = 'desc' | 'asc' | undefined;
@@ -28,6 +28,7 @@ function SortLink({
replace,
}: Props) {
const target = generateSortLink();
+ const navigate = useNavigate();
if (!target || !canSort) {
return {title} ;
@@ -40,7 +41,7 @@ function SortLink({
const handleOnClick: React.MouseEventHandler = e => {
if (replace) {
e.preventDefault();
- browserHistory.replace(target);
+ navigate(target, {replace: true});
}
onClick?.(e);
};
diff --git a/static/app/components/group/assignedTo.tsx b/static/app/components/group/assignedTo.tsx
index 5bb43bfa54c18f..a7b576a583342c 100644
--- a/static/app/components/group/assignedTo.tsx
+++ b/static/app/components/group/assignedTo.tsx
@@ -92,7 +92,7 @@ function getSuggestedReason(owner: IssueOwner) {
}
if (owner.rules?.length) {
- const firstRule = owner.rules[0];
+ const firstRule = owner.rules[0]!;
return `${toTitleCase(firstRule[0])}:${firstRule[1]}`;
}
diff --git a/static/app/components/group/externalIssueForm.spec.tsx b/static/app/components/group/externalIssueForm.spec.tsx
index 1ac255859ba244..98559091df17c2 100644
--- a/static/app/components/group/externalIssueForm.spec.tsx
+++ b/static/app/components/group/externalIssueForm.spec.tsx
@@ -11,8 +11,8 @@ import ExternalIssueForm from 'sentry/components/group/externalIssueForm';
jest.mock('lodash/debounce', () => {
const debounceMap = new Map();
const mockDebounce =
- (fn, timeout) =>
- (...args) => {
+ (fn: (...args: any[]) => void, timeout: number) =>
+ (...args: any[]) => {
if (debounceMap.has(fn)) {
clearTimeout(debounceMap.get(fn));
}
@@ -28,7 +28,9 @@ jest.mock('lodash/debounce', () => {
});
describe('ExternalIssueForm', () => {
- let group, integration, formConfig;
+ let group!: ReturnType;
+ let integration!: ReturnType;
+ let formConfig!: any;
const onChange = jest.fn();
beforeEach(() => {
MockApiClient.clearMockResponses();
@@ -48,7 +50,7 @@ describe('ExternalIssueForm', () => {
match: [MockApiClient.matchQuery({action: 'create'})],
});
- const styledWrapper = styled(c => c.children);
+ const styledWrapper = styled((c: {children: React.ReactNode}) => c.children);
const wrapper = render(
{
});
});
describe('link', () => {
- let externalIssueField, getFormConfigRequest;
+ let externalIssueField!: any;
+ let getFormConfigRequest!: jest.Mock;
beforeEach(() => {
externalIssueField = {
name: 'externalIssue',
diff --git a/static/app/components/group/externalIssueForm.tsx b/static/app/components/group/externalIssueForm.tsx
index df53acb18a931c..44d41805f05e5c 100644
--- a/static/app/components/group/externalIssueForm.tsx
+++ b/static/app/components/group/externalIssueForm.tsx
@@ -1,5 +1,5 @@
+import type {Span} from '@sentry/core';
import * as Sentry from '@sentry/react';
-import type {Span} from '@sentry/types';
import {addSuccessMessage} from 'sentry/actionCreators/indicator';
import type DeprecatedAsyncComponent from 'sentry/components/deprecatedAsyncComponent';
diff --git a/static/app/components/group/externalIssuesList/externalIssueActions.tsx b/static/app/components/group/externalIssuesList/externalIssueActions.tsx
index 115a8799da0420..cb4c8182250b34 100644
--- a/static/app/components/group/externalIssuesList/externalIssueActions.tsx
+++ b/static/app/components/group/externalIssuesList/externalIssueActions.tsx
@@ -75,7 +75,7 @@ function ExternalIssueActions({configurations, group, onChange}: Props) {
const {externalIssues} = integration;
// Currently we do not support a case where there is multiple external issues.
// For example, we shouldn't have more than 1 jira ticket created for an issue for each jira configuration.
- const issue = externalIssues[0];
+ const issue = externalIssues[0]!;
const {id} = issue;
const endpoint = `/organizations/${organization.slug}/issues/${group.id}/integrations/${integration.id}/?externalIssue=${id}`;
@@ -97,7 +97,7 @@ function ExternalIssueActions({configurations, group, onChange}: Props) {
{linked.map(config => {
const {provider, externalIssues} = config;
- const issue = externalIssues[0];
+ const issue = externalIssues[0]!;
return (
0 && (
{unlinked.map(config => (
@@ -148,7 +148,7 @@ function ExternalIssueActions({configurations, group, onChange}: Props) {
? () =>
doOpenExternalIssueModal({
group,
- integration: unlinked[0],
+ integration: unlinked[0]!,
onChange,
organization,
})
diff --git a/static/app/components/group/externalIssuesList/hooks/useIntegrationExternalIssues.tsx b/static/app/components/group/externalIssuesList/hooks/useIntegrationExternalIssues.tsx
index aa10ecaec532a7..74b860ed629809 100644
--- a/static/app/components/group/externalIssuesList/hooks/useIntegrationExternalIssues.tsx
+++ b/static/app/components/group/externalIssuesList/hooks/useIntegrationExternalIssues.tsx
@@ -84,11 +84,11 @@ export function useIntegrationExternalIssues({
...configurations
.filter(config => config.externalIssues.length > 0)
.map(config => ({
- key: config.externalIssues[0].id,
- displayName: config.externalIssues[0].key,
+ key: config.externalIssues[0]!.id,
+ displayName: config.externalIssues[0]!.key,
displayIcon,
- url: config.externalIssues[0].url,
- title: config.externalIssues[0].title,
+ url: config.externalIssues[0]!.url,
+ title: config.externalIssues[0]!.title,
onUnlink: () => {
// Currently we do not support a case where there is multiple external issues.
// For example, we shouldn't have more than 1 jira ticket created for an issue for each jira configuration.
@@ -98,7 +98,7 @@ export function useIntegrationExternalIssues({
`/organizations/${organization.slug}/issues/${group.id}/integrations/${config.id}/`,
{
method: 'DELETE',
- query: {externalIssue: issue.id},
+ query: {externalIssue: issue!.id},
success: () => {
addSuccessMessage(t('Successfully unlinked issue.'));
refetchIntegrations();
diff --git a/static/app/components/group/groupSummary.spec.tsx b/static/app/components/group/groupSummary.spec.tsx
index cfb553be95b652..ce57ddae2bd181 100644
--- a/static/app/components/group/groupSummary.spec.tsx
+++ b/static/app/components/group/groupSummary.spec.tsx
@@ -54,12 +54,12 @@ describe('GroupSummary', function () {
await waitFor(() => {
expect(screen.getByText("What's wrong")).toBeInTheDocument();
- expect(screen.getByText('Test whats wrong')).toBeInTheDocument();
- expect(screen.getByText('In the trace')).toBeInTheDocument();
- expect(screen.getByText('Test trace')).toBeInTheDocument();
- expect(screen.getByText('Possible cause')).toBeInTheDocument();
- expect(screen.getByText('Test possible cause')).toBeInTheDocument();
});
+ expect(screen.getByText('Test whats wrong')).toBeInTheDocument();
+ expect(screen.getByText('In the trace')).toBeInTheDocument();
+ expect(screen.getByText('Test trace')).toBeInTheDocument();
+ expect(screen.getByText('Possible cause')).toBeInTheDocument();
+ expect(screen.getByText('Test possible cause')).toBeInTheDocument();
});
it('shows loading state', function () {
@@ -110,11 +110,11 @@ describe('GroupSummary', function () {
await waitFor(() => {
expect(screen.getByText("What's wrong")).toBeInTheDocument();
- expect(screen.getByText('Test whats wrong')).toBeInTheDocument();
- expect(screen.queryByText('In the trace')).not.toBeInTheDocument();
- expect(screen.getByText('Possible cause')).toBeInTheDocument();
- expect(screen.getByText('Test possible cause')).toBeInTheDocument();
});
+ expect(screen.getByText('Test whats wrong')).toBeInTheDocument();
+ expect(screen.queryByText('In the trace')).not.toBeInTheDocument();
+ expect(screen.getByText('Possible cause')).toBeInTheDocument();
+ expect(screen.getByText('Test possible cause')).toBeInTheDocument();
});
it('renders in preview mode', async function () {
@@ -131,7 +131,7 @@ describe('GroupSummary', function () {
await waitFor(() => {
expect(screen.getByText("What's wrong")).toBeInTheDocument();
- expect(screen.getByText('Test whats wrong')).toBeInTheDocument();
});
+ expect(screen.getByText('Test whats wrong')).toBeInTheDocument();
});
});
diff --git a/static/app/components/group/groupSummary.tsx b/static/app/components/group/groupSummary.tsx
index 67b204f318e1f1..195123825f03a3 100644
--- a/static/app/components/group/groupSummary.tsx
+++ b/static/app/components/group/groupSummary.tsx
@@ -1,10 +1,9 @@
-import {useEffect, useRef, useState} from 'react';
+import {useEffect, useState} from 'react';
import styled from '@emotion/styled';
-import {Button} from 'sentry/components/button';
-import Link from 'sentry/components/links/link';
+import {DropdownMenu} from 'sentry/components/dropdownMenu';
import Placeholder from 'sentry/components/placeholder';
-import {IconEllipsis, IconFatal, IconFocus, IconRefresh, IconSpan} from 'sentry/icons';
+import {IconEllipsis, IconFatal, IconFocus, IconSpan} from 'sentry/icons';
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import type {Event} from 'sentry/types/event';
@@ -13,6 +12,7 @@ import type {Project} from 'sentry/types/project';
import marked from 'sentry/utils/marked';
import {type ApiQueryKey, useApiQuery, useQueryClient} from 'sentry/utils/queryClient';
import normalizeUrl from 'sentry/utils/url/normalizeUrl';
+import {useFeedbackForm} from 'sentry/utils/useFeedbackForm';
import useOrganization from 'sentry/utils/useOrganization';
import {useAiConfig} from 'sentry/views/issueDetails/streamline/hooks/useAiConfig';
@@ -90,7 +90,7 @@ export function GroupSummary({
}) {
const organization = useOrganization();
const [forceEvent, setForceEvent] = useState(false);
- const [showEventDetails, setShowEventDetails] = useState(false);
+ const openFeedbackForm = useFeedbackForm();
const {data, isPending, isError, refresh} = useGroupSummary(
group,
event,
@@ -98,9 +98,6 @@ export function GroupSummary({
forceEvent
);
- const popupRef = useRef(null);
- const buttonRef = useRef(null);
-
useEffect(() => {
if (forceEvent && !isPending) {
refresh();
@@ -108,58 +105,51 @@ export function GroupSummary({
}
}, [forceEvent, isPending, refresh]);
- useEffect(() => {
- function handleClickOutside(e: MouseEvent) {
- if (
- showEventDetails &&
- popupRef.current &&
- buttonRef.current &&
- !popupRef.current.contains(e.target as Node) &&
- !buttonRef.current.contains(e.target as Node)
- ) {
- setShowEventDetails(false);
- }
- }
-
- document.addEventListener('click', handleClickOutside);
- return () => {
- document.removeEventListener('click', handleClickOutside);
- };
- }, [showEventDetails]);
-
- const tooltipContent = data?.eventId ? (
- event?.id === data.eventId ? (
- t('Based on this event')
- ) : (
-
-
- {t('Based on event ')}
-
- {data.eventId.substring(0, 8)}
-
-
- }
- busy={isPending}
- aria-label={t('Summarize this event instead')}
- title={t('Summarize this event instead')}
- onClick={() => {
- setForceEvent(true);
- }}
- />
-
- )
- ) : (
- ''
- );
+ const eventDetailsItems = [
+ {
+ key: 'event-info',
+ label:
+ event?.id === data?.eventId ? (
+ t('Based on this event')
+ ) : (
+ {t('See original event (%s)', data?.eventId?.substring(0, 8))}
+ ),
+ to:
+ event?.id === data?.eventId
+ ? undefined
+ : window.location.origin +
+ normalizeUrl(
+ `/organizations/${organization.slug}/issues/${data?.groupId}/events/${data?.eventId}/`
+ ),
+ },
+ ...(event?.id !== data?.eventId
+ ? [
+ {
+ key: 'refresh',
+ label: t('Summarize this event instead'),
+ onAction: () => setForceEvent(true),
+ disabled: isPending,
+ },
+ ]
+ : []),
+ ...(openFeedbackForm
+ ? [
+ {
+ key: 'feedback',
+ label: t('Give feedback'),
+ onAction: () => {
+ openFeedbackForm({
+ messagePlaceholder: t('How can we make Issue Summary better for you?'),
+ tags: {
+ ['feedback.source']: 'issue_details_ai_autofix',
+ ['feedback.owner']: 'ml-ai',
+ },
+ });
+ },
+ },
+ ]
+ : []),
+ ];
const insightCards = [
{
@@ -190,18 +180,20 @@ export function GroupSummary({
{isError ? {t('Error loading summary')}
: null}
{data?.eventId && !isPending && (
-
- }
- aria-label={t('Event details')}
- borderless
- onClick={() => setShowEventDetails(!showEventDetails)}
+
+ ,
+ 'aria-label': t('Event details'),
+ size: 'xs',
+ borderless: true,
+ showChevron: false,
+ }}
+ isDisabled={isPending}
+ position="bottom-end"
+ offset={4}
/>
- {showEventDetails && (
- {tooltipContent}
- )}
)}
@@ -264,7 +256,6 @@ const InsightCard = styled('div')`
display: flex;
flex-direction: column;
border-radius: ${p => p.theme.borderRadius};
- background: ${p => p.theme.background};
width: 100%;
min-height: 0;
`;
@@ -323,40 +314,12 @@ const CardContent = styled('div')`
flex: 1;
`;
-const TooltipWrapper = styled('div')`
+const TooltipWrapper = styled('div')<{preview?: boolean}>`
position: absolute;
- top: 0;
+ top: ${p => (p.preview ? `-32px` : `-${space(0.5)}`)};
right: 0;
`;
-const EventLink = styled(Link)`
- color: ${p => p.theme.linkColor};
- :hover {
- color: ${p => p.theme.linkHoverColor};
- }
-`;
-
-const TooltipContentWrapper = styled('div')`
- display: flex;
- flex-direction: row;
- align-items: center;
- gap: ${space(1)};
-`;
-
-const EventDetailsPopup = styled('div')`
- position: absolute;
- right: calc(100% + ${space(0.5)});
- top: 50%;
- transform: translateY(-50%);
- padding: ${space(1.5)};
- background: ${p => p.theme.background};
- border: 1px solid ${p => p.theme.border};
- border-radius: ${p => p.theme.borderRadius};
- box-shadow: ${p => p.theme.dropShadowHeavy};
- z-index: 0;
- white-space: nowrap;
-`;
-
const StyledIconEllipsis = styled(IconEllipsis)`
color: ${p => p.theme.subText};
`;
diff --git a/static/app/components/group/releaseChart.spec.tsx b/static/app/components/group/releaseChart.spec.tsx
index 2b07253fe25551..4dd8ae1d4d1ad3 100644
--- a/static/app/components/group/releaseChart.spec.tsx
+++ b/static/app/components/group/releaseChart.spec.tsx
@@ -20,6 +20,6 @@ it('should set marker before first bucket', () => {
const markers = getGroupReleaseChartMarkers(theme as any, data, firstSeen, lastSeen)!.data!;
expect((markers[0] as any).displayValue).toBe(new Date(firstSeen).getTime());
- expect(markers[0].coord![0]).toBe(1659533400000);
- expect(markers[1].coord![0]).toBe(new Date(lastSeen).getTime());
+ expect(markers[0]!.coord![0]).toBe(1659533400000);
+ expect(markers[1]!.coord![0]).toBe(new Date(lastSeen).getTime());
});
diff --git a/static/app/components/group/releaseChart.tsx b/static/app/components/group/releaseChart.tsx
index aefd6becd43798..38d08d207e29e0 100644
--- a/static/app/components/group/releaseChart.tsx
+++ b/static/app/components/group/releaseChart.tsx
@@ -47,7 +47,7 @@ export function getGroupReleaseChartMarkers(
): BarChartSeries['markPoint'] {
const markers: Marker[] = [];
// Get the timestamp of the first point.
- const firstGraphTime = stats[0][0] * 1000;
+ const firstGraphTime = stats[0]![0] * 1000;
const firstSeenX = new Date(firstSeen ?? 0).getTime();
const lastSeenX = new Date(lastSeen ?? 0).getTime();
@@ -67,9 +67,9 @@ export function getGroupReleaseChartMarkers(
let bucketStart: number | undefined;
if (firstBucket > 0) {
// The size of the data interval in ms
- const halfBucketSize = ((stats[1][0] - stats[0][0]) * 1000) / 2;
+ const halfBucketSize = ((stats[1]![0] - stats[0]![0]) * 1000) / 2;
// Display the marker in front of the first bucket
- bucketStart = stats[firstBucket - 1][0] * 1000 - halfBucketSize;
+ bucketStart = stats[firstBucket - 1]![0] * 1000 - halfBucketSize;
}
markers.push({
@@ -156,26 +156,26 @@ function GroupReleaseChart(props: Props) {
series.push({
seriesName: t('Events in %s', environmentLabel),
- data: environmentStats[statsPeriod].map(point => ({
- name: point[0] * 1000,
- value: point[1],
+ data: environmentStats[statsPeriod]!.map(point => ({
+ name: point![0] * 1000,
+ value: point![1],
})),
});
if (release && releaseStats) {
series.push({
seriesName: t('Events in release %s', formatVersion(release.version)),
- data: releaseStats[statsPeriod].map(point => ({
- name: point[0] * 1000,
- value: point[1],
+ data: releaseStats[statsPeriod]!.map(point => ({
+ name: point![0] * 1000,
+ value: point![1],
})),
});
}
const totalSeries =
environment && environmentStats ? environmentStats[statsPeriod] : stats;
- const totalEvents = totalSeries.reduce((acc, current) => acc + current[1], 0);
- series[0].markPoint = getGroupReleaseChartMarkers(theme, stats, firstSeen, lastSeen);
+ const totalEvents = totalSeries!.reduce((acc, current) => acc + current[1], 0);
+ series[0]!.markPoint = getGroupReleaseChartMarkers(theme, stats, firstSeen, lastSeen);
return (
diff --git a/static/app/components/group/sentryAppExternalIssueModal.tsx b/static/app/components/group/sentryAppExternalIssueModal.tsx
index d0bdf650b5458d..c0d273a808cbef 100644
--- a/static/app/components/group/sentryAppExternalIssueModal.tsx
+++ b/static/app/components/group/sentryAppExternalIssueModal.tsx
@@ -1,4 +1,4 @@
-import {Component, Fragment} from 'react';
+import {Fragment, useState} from 'react';
import type {ModalRenderProps} from 'sentry/actionCreators/modal';
import SentryAppExternalIssueForm from 'sentry/components/group/sentryAppExternalIssueForm';
@@ -15,58 +15,57 @@ type Props = ModalRenderProps & {
sentryAppInstallation: SentryAppInstallation;
};
-type State = {
- action: 'create' | 'link';
-};
-
-class SentryAppExternalIssueModal extends Component {
- state: State = {
- action: 'create',
- };
+function SentryAppExternalIssueModal(props: Props) {
+ const [action, setAction] = useState<'create' | 'link'>('create');
+ const {
+ Header,
+ Body,
+ sentryAppComponent,
+ sentryAppInstallation,
+ group,
+ closeModal,
+ event,
+ } = props;
- showLink = () => {
- this.setState({action: 'link'});
+ const showLink = () => {
+ setAction('link');
};
- showCreate = () => {
- this.setState({action: 'create'});
+ const showCreate = () => {
+ setAction('create');
};
- onSubmitSuccess = () => {
- this.props.closeModal();
+ const onSubmitSuccess = () => {
+ closeModal();
};
- render() {
- const {Header, Body, sentryAppComponent, sentryAppInstallation, group} = this.props;
- const {action} = this.state;
- const name = sentryAppComponent.sentryApp.name;
- const config = sentryAppComponent.schema[action];
+ const name = sentryAppComponent.sentryApp.name;
+ const config = sentryAppComponent.schema[action];
- return (
-
- {tct('[name] Issue', {name})}
-
-
- {t('Create')}
-
-
- {t('Link')}
-
-
-
-
-
-
- );
- }
+ return (
+
+ {tct('[name] Issue', {name})}
+
+
+ {t('Create')}
+
+
+ {t('Link')}
+
+
+
+
+
+
+ );
}
export default SentryAppExternalIssueModal;
diff --git a/static/app/components/group/suggestedOwnerHovercard.tsx b/static/app/components/group/suggestedOwnerHovercard.tsx
index 1bb0108c0684c8..040ccd18990772 100644
--- a/static/app/components/group/suggestedOwnerHovercard.tsx
+++ b/static/app/components/group/suggestedOwnerHovercard.tsx
@@ -1,4 +1,4 @@
-import {Component, Fragment} from 'react';
+import {Fragment, useState} from 'react';
import styled from '@emotion/styled';
import moment from 'moment-timezone';
@@ -48,160 +48,144 @@ type Props = {
rules?: any[] | null;
};
-type State = {
- commitsExpanded: boolean;
- rulesExpanded: boolean;
-};
+function SuggestedOwnerHovercard(props: Props) {
+ const [commitsExpanded, setCommitsExpanded] = useState(false);
+ const [rulesExpanded, setRulesExpanded] = useState(false);
-class SuggestedOwnerHovercard extends Component {
- state: State = {
- commitsExpanded: false,
- rulesExpanded: false,
+ const {organization, actor, commits, rules, release, projectId} = props;
+ const modalData = {
+ initialData: [
+ {
+ emails: actor.email ? new Set([actor.email]) : new Set([]),
+ },
+ ],
+ source: 'suggested_assignees',
};
- render() {
- const {organization, actor, commits, rules, release, projectId, ...props} =
- this.props;
- const {commitsExpanded, rulesExpanded} = this.state;
- const modalData = {
- initialData: [
- {
- emails: actor.email ? new Set([actor.email]) : new Set([]),
- },
- ],
- source: 'suggested_assignees',
- };
-
- return (
-
-
-
- {actor.name || actor.email}
-
- {actor.id === undefined && (
-
- {tct(
- 'The email [actorEmail] is not a member of your organization. [inviteUser:Invite] them or link additional emails in [accountSettings:account settings].',
- {
- actorEmail: {actor.email} ,
- accountSettings: ,
- inviteUser: openInviteMembersModal(modalData)} />,
- }
- )}
-
- )}
-
- }
- body={
-
- {commits !== undefined && !release && (
-
-
- {t('Commits')}
-
-
- {commits
- .slice(0, commitsExpanded ? commits.length : 3)
- .map(({message, dateCreated}, i) => (
-
-
-
-
- ))}
-
- {commits.length > 3 && !commitsExpanded ? (
- this.setState({commitsExpanded: true})}
- >
- {t('View more')}
-
- ) : null}
-
- )}
- {commits !== undefined && release && (
-
-
- {t('Suspect Release')}
-
-
-
-
-
- {tct('[actor] [verb] [commits] in [release]', {
- actor: actor.name,
- verb: commits.length > 1 ? t('made') : t('last committed'),
- commits:
- commits.length > 1 ? (
- // Link to release commits
-
- {t('%s commits', commits.length)}
-
- ) : (
-
- ),
- release: (
-
+ return (
+
+
+
+ {actor.name || actor.email}
+
+ {actor.id === undefined && (
+
+ {tct(
+ 'The email [actorEmail] is not a member of your organization. [inviteUser:Invite] them or link additional emails in [accountSettings:account settings].',
+ {
+ actorEmail: {actor.email} ,
+ accountSettings: ,
+ inviteUser: openInviteMembersModal(modalData)} />,
+ }
+ )}
+
+ )}
+
+ }
+ body={
+
+ {commits !== undefined && !release && (
+
+
+ {t('Commits')}
+
+
+ {commits
+ .slice(0, commitsExpanded ? commits.length : 3)
+ .map(({message, dateCreated}, i) => (
+
+
+
+
+ ))}
+
+ {commits.length > 3 && !commitsExpanded ? (
+ setCommitsExpanded(true)}
+ >
+ {t('View more')}
+
+ ) : null}
+
+ )}
+ {commits !== undefined && release && (
+
+
+ {t('Suspect Release')}
+
+
+
+
+
+ {tct('[actor] [verb] [commits] in [release]', {
+ actor: actor.name,
+ verb: commits.length > 1 ? t('made') : t('last committed'),
+ commits:
+ commits.length > 1 ? (
+ // Link to release commits
+
+ {t('%s commits', commits.length)}
+
+ ) : (
+
),
- })}
-
-
-
-
- )}
- {defined(rules) && (
-
-
- {t('Matching Ownership Rules')}
-
-
- {rules
- .slice(0, rulesExpanded ? rules.length : 3)
- .map(([type, matched], i) => (
-
-
- {matched}
-
- ))}
-
- {rules.length > 3 && !rulesExpanded ? (
- this.setState({rulesExpanded: true})}
- >
- {t('View more')}
-
- ) : null}
-
- )}
-
- }
- {...props}
- />
- );
- }
+ release: (
+
+ ),
+ })}
+
+
+
+
+ )}
+ {defined(rules) && (
+
+
+ {t('Matching Ownership Rules')}
+
+
+ {rules
+ .slice(0, rulesExpanded ? rules.length : 3)
+ .map(([type, matched], i) => (
+
+
+ {matched}
+
+ ))}
+
+ {rules.length > 3 && !rulesExpanded ? (
+ setRulesExpanded(true)}
+ >
+ {t('View more')}
+
+ ) : null}
+
+ )}
+
+ }
+ {...props}
+ />
+ );
}
const tagColors = {
diff --git a/static/app/components/group/tagDistributionMeter.spec.tsx b/static/app/components/group/tagDistributionMeter.spec.tsx
index 049b55037310d7..62ddf09d5991b3 100644
--- a/static/app/components/group/tagDistributionMeter.spec.tsx
+++ b/static/app/components/group/tagDistributionMeter.spec.tsx
@@ -35,8 +35,8 @@ describe('TagDistributionMeter', function () {
group={GroupFixture({id: '1337'})}
organization={organization}
projectId="456"
- totalValues={tags[0].totalValues}
- topValues={tags[0].topValues}
+ totalValues={tags[0]!.totalValues}
+ topValues={tags[0]!.topValues}
/>
);
expect(
diff --git a/static/app/components/group/tagFacets/index.spec.tsx b/static/app/components/group/tagFacets/index.spec.tsx
index beb95474fad588..11fb2307431538 100644
--- a/static/app/components/group/tagFacets/index.spec.tsx
+++ b/static/app/components/group/tagFacets/index.spec.tsx
@@ -139,9 +139,9 @@ describe('Tag Facets', function () {
);
await waitFor(() => {
expect(screen.getByRole('listitem', {name: 'os'})).toBeInTheDocument();
- expect(screen.getByRole('listitem', {name: 'device'})).toBeInTheDocument();
- expect(screen.getByRole('listitem', {name: 'release'})).toBeInTheDocument();
});
+ expect(screen.getByRole('listitem', {name: 'device'})).toBeInTheDocument();
+ expect(screen.getByRole('listitem', {name: 'release'})).toBeInTheDocument();
});
it('expands first tag distribution by default', async function () {
diff --git a/static/app/components/group/tagFacets/index.tsx b/static/app/components/group/tagFacets/index.tsx
index bc00bffee094ab..56549f9545ee86 100644
--- a/static/app/components/group/tagFacets/index.tsx
+++ b/static/app/components/group/tagFacets/index.tsx
@@ -51,8 +51,8 @@ export function TAGS_FORMATTER(tagsData: Record) {
Object.keys(tagsData).forEach(tagKey => {
if (tagKey === 'release') {
transformedTagsData[tagKey] = {
- ...tagsData[tagKey],
- topValues: tagsData[tagKey].topValues.map(topValue => {
+ ...tagsData[tagKey]!,
+ topValues: tagsData[tagKey]!.topValues.map(topValue => {
return {
...topValue,
name: formatVersion(topValue.name),
@@ -61,8 +61,8 @@ export function TAGS_FORMATTER(tagsData: Record) {
};
} else if (tagKey === 'device') {
transformedTagsData[tagKey] = {
- ...tagsData[tagKey],
- topValues: tagsData[tagKey].topValues.map(topValue => {
+ ...tagsData[tagKey]!,
+ topValues: tagsData[tagKey]!.topValues.map(topValue => {
return {
...topValue,
name: topValue.readable ?? topValue.name,
@@ -70,7 +70,7 @@ export function TAGS_FORMATTER(tagsData: Record) {
}),
};
} else {
- transformedTagsData[tagKey] = tagsData[tagKey];
+ transformedTagsData[tagKey] = tagsData[tagKey]!;
}
});
diff --git a/static/app/components/group/tagFacets/tagFacetsDistributionMeter.tsx b/static/app/components/group/tagFacets/tagFacetsDistributionMeter.tsx
index 0f4ddddcefd2e2..6e1e79ed39ddfe 100644
--- a/static/app/components/group/tagFacets/tagFacetsDistributionMeter.tsx
+++ b/static/app/components/group/tagFacets/tagFacetsDistributionMeter.tsx
@@ -67,9 +67,9 @@ function TagFacetsDistributionMeter({
- {topSegments[0].name || t('n/a')}
+ {topSegments[0]!.name || t('n/a')}
) : (
{/* if the first segment is 6% or less, the label won't fit cleanly into the segment, so don't show the label */}
@@ -180,7 +180,7 @@ function TagFacetsDistributionMeter({
onMouseLeave={() => setHoveredValue(null)}
>
diff --git a/static/app/components/group/tagFacets/tagFacetsTypes.tsx b/static/app/components/group/tagFacets/tagFacetsTypes.tsx
deleted file mode 100644
index 467e59da472042..00000000000000
--- a/static/app/components/group/tagFacets/tagFacetsTypes.tsx
+++ /dev/null
@@ -1,17 +0,0 @@
-import type {ReactNode} from 'react';
-
-import type {Event} from 'sentry/types/event';
-import type {TagWithTopValues} from 'sentry/types/group';
-import type {Environment, Project} from 'sentry/types/project';
-
-export type TagFacetsProps = {
- environments: Environment[];
- groupId: string;
- project: Project;
- tagKeys: string[];
- event?: Event;
- tagFormatter?: (
- tagsData: Record
- ) => Record;
- title?: ReactNode;
-};
diff --git a/static/app/components/guidedSteps/guidedSteps.tsx b/static/app/components/guidedSteps/guidedSteps.tsx
index e5c5c1395d1bb4..491b00777e54e7 100644
--- a/static/app/components/guidedSteps/guidedSteps.tsx
+++ b/static/app/components/guidedSteps/guidedSteps.tsx
@@ -67,7 +67,7 @@ function useGuidedStepsContentValue({
// render and that step order does not change.
const registerStep = useCallback((props: RegisterStepInfo) => {
if (registeredStepsRef.current[props.stepKey]) {
- registeredStepsRef.current[props.stepKey].isCompleted = props.isCompleted;
+ registeredStepsRef.current[props.stepKey]!.isCompleted = props.isCompleted;
return;
}
const numRegisteredSteps = Object.keys(registeredStepsRef.current).length + 1;
diff --git a/static/app/components/idBadge/index.stories.tsx b/static/app/components/idBadge/index.stories.tsx
index d2c5e0eab4c2ca..abcf2d8d3430d0 100644
--- a/static/app/components/idBadge/index.stories.tsx
+++ b/static/app/components/idBadge/index.stories.tsx
@@ -42,7 +42,7 @@ export default storyBook(IdBadge, story => {
return ;
}
- return ;
+ return ;
});
story('Project', () => {
@@ -53,7 +53,7 @@ export default storyBook(IdBadge, story => {
return ;
}
- return ;
+ return ;
});
story('User', () => {
@@ -89,7 +89,7 @@ export default storyBook(IdBadge, story => {
teamRoleList: [],
teamRoles: [],
teams: [],
- user: user,
+ user,
flags: {
'idp:provisioned': false,
'idp:role-restricted': false,
@@ -116,8 +116,8 @@ export default storyBook(IdBadge, story => {
const teamActor: Actor = {
type: 'team',
- id: teams[0].id,
- name: teams[0].name,
+ id: teams[0]!.id,
+ name: teams[0]!.name,
};
return (
diff --git a/static/app/components/idBadge/memberBadge.spec.tsx b/static/app/components/idBadge/memberBadge.spec.tsx
index e4c959a25fb60f..737c06c4ea36c8 100644
--- a/static/app/components/idBadge/memberBadge.spec.tsx
+++ b/static/app/components/idBadge/memberBadge.spec.tsx
@@ -6,7 +6,7 @@ import {render, screen} from 'sentry-test/reactTestingLibrary';
import MemberBadge from 'sentry/components/idBadge/memberBadge';
describe('MemberBadge', function () {
- let member;
+ let member!: ReturnType;
beforeEach(() => {
member = MemberFixture();
});
diff --git a/static/app/components/inactivePlugins.spec.tsx b/static/app/components/inactivePlugins.spec.tsx
index 89067a6d7c52be..32f4b76699a8cf 100644
--- a/static/app/components/inactivePlugins.spec.tsx
+++ b/static/app/components/inactivePlugins.spec.tsx
@@ -20,7 +20,7 @@ describe('InactivePlugins', function () {
const enableFn = jest.fn();
const plugins = PluginsFixture();
render( );
- await userEvent.click(screen.getByRole('button', {name: plugins[0].name}));
+ await userEvent.click(screen.getByRole('button', {name: plugins[0]!.name}));
expect(enableFn).toHaveBeenCalledWith(expect.objectContaining(plugins[0]));
});
});
diff --git a/static/app/components/indicators.spec.tsx b/static/app/components/indicators.spec.tsx
index 5883ecbd211a99..dc8a4708c0998c 100644
--- a/static/app/components/indicators.spec.tsx
+++ b/static/app/components/indicators.spec.tsx
@@ -5,6 +5,7 @@ import {
addMessage,
addSuccessMessage,
clearIndicators,
+ type Indicator,
} from 'sentry/actionCreators/indicator';
import Indicators from 'sentry/components/indicators';
import IndicatorStore from 'sentry/stores/indicatorStore';
@@ -39,7 +40,7 @@ describe('Indicators', function () {
const {container} = render( );
// when "type" is empty, we should treat it as loading state
- let indicator;
+ let indicator!: Indicator;
act(() => {
indicator = IndicatorStore.add('Loading');
});
diff --git a/static/app/components/issueDiff/index.tsx b/static/app/components/issueDiff/index.tsx
index 859368cbbe45bd..1f90a1c2db80ee 100644
--- a/static/app/components/issueDiff/index.tsx
+++ b/static/app/components/issueDiff/index.tsx
@@ -118,7 +118,7 @@ class IssueDiff extends Component {
parent_transaction: this.getTransaction(
targetEventData?.tags ? targetEventData.tags : []
),
- shouldBeGrouped: shouldBeGrouped,
+ shouldBeGrouped,
});
}
} catch {
diff --git a/static/app/components/issues/groupList.tsx b/static/app/components/issues/groupList.tsx
index 7992c02d8b856e..c5f873838b77fe 100644
--- a/static/app/components/issues/groupList.tsx
+++ b/static/app/components/issues/groupList.tsx
@@ -22,7 +22,6 @@ import GroupStore from 'sentry/stores/groupStore';
import {space} from 'sentry/styles/space';
import type {Group} from 'sentry/types/group';
import type {WithRouterProps} from 'sentry/types/legacyReactRouter';
-import {browserHistory} from 'sentry/utils/browserHistory';
import withApi from 'sentry/utils/withApi';
// eslint-disable-next-line no-restricted-imports
import withSentryRouter from 'sentry/utils/withSentryRouter';
@@ -216,12 +215,12 @@ class GroupList extends Component {
return queryParams;
}
- handleCursorChange(
+ handleCursorChange = (
cursor: string | undefined,
path: string,
query: Record,
pageDiff: number
- ) {
+ ) => {
const queryPageInt = parseInt(query.page, 10);
let nextPage: number | undefined = isNaN(queryPageInt)
? pageDiff
@@ -235,11 +234,11 @@ class GroupList extends Component {
nextPage = undefined;
}
- browserHistory.push({
+ this.props.router.push({
pathname: path,
query: {...query, cursor, page: nextPage},
});
- }
+ };
onGroupChange() {
const groups = GroupStore.getAllItems() as Group[];
diff --git a/static/app/components/lastCommit.spec.tsx b/static/app/components/lastCommit.spec.tsx
index f45b8b070f7bd5..7884c1f0b52398 100644
--- a/static/app/components/lastCommit.spec.tsx
+++ b/static/app/components/lastCommit.spec.tsx
@@ -5,7 +5,7 @@ import {render, screen} from 'sentry-test/reactTestingLibrary';
import LastCommit from 'sentry/components/lastCommit';
describe('LastCommit', function () {
- let mockedCommit;
+ let mockedCommit!: ReturnType;
const mockedCommitTitle = '(improve) Add Links to Spike-Protection Email (#2408)';
beforeEach(() => {
@@ -17,7 +17,7 @@ describe('LastCommit', function () {
});
it('links to the commit in GitHub', function () {
- mockedCommit.repository.provider = {id: 'github'};
+ mockedCommit.repository!.provider = {id: 'github', name: 'GitHub'};
const mockedCommitURL = `${mockedCommit.repository?.url}/commit/${mockedCommit.id}`;
render( );
diff --git a/static/app/components/lastCommit.tsx b/static/app/components/lastCommit.tsx
index e6cd82bd56b0e9..aef8965844fa39 100644
--- a/static/app/components/lastCommit.tsx
+++ b/static/app/components/lastCommit.tsx
@@ -38,7 +38,7 @@ function LastCommit({commit}: Props) {
);
}
- let finalMessage = message.split(/\n/)[0];
+ let finalMessage = message.split(/\n/)[0]!;
if (finalMessage.length > 100) {
let truncated = finalMessage.substring(0, 90);
const words = truncated.split(/ /);
diff --git a/static/app/components/lazyLoad.spec.tsx b/static/app/components/lazyLoad.spec.tsx
index d7eca30ff2908f..03c8fb9d4fb3f0 100644
--- a/static/app/components/lazyLoad.spec.tsx
+++ b/static/app/components/lazyLoad.spec.tsx
@@ -49,7 +49,6 @@ describe('LazyLoad', function () {
});
it('renders with error message when promise is rejected', async function () {
- // eslint-disable-next-line no-console
jest.spyOn(console, 'error').mockImplementation(jest.fn());
const getComponent = () => Promise.reject(new Error('Could not load component'));
diff --git a/static/app/components/letterAvatar.tsx b/static/app/components/letterAvatar.tsx
index 93a25bdc0768c3..e98bfb8433c84b 100644
--- a/static/app/components/letterAvatar.tsx
+++ b/static/app/components/letterAvatar.tsx
@@ -37,7 +37,7 @@ function getColor(identifier: string | undefined): Color {
}
const id = hashIdentifier(identifier);
- return COLORS[id % COLORS.length];
+ return COLORS[id % COLORS.length]!;
}
function getInitials(displayName: string | undefined) {
@@ -46,9 +46,9 @@ function getInitials(displayName: string | undefined) {
);
// Use Array.from as slicing and substring() work on ucs2 segments which
// results in only getting half of any 4+ byte character.
- let initials = Array.from(names[0])[0];
+ let initials = Array.from(names[0]!)[0]!;
if (names.length > 1) {
- initials += Array.from(names[names.length - 1])[0];
+ initials += Array.from(names[names.length - 1]!)[0]!;
}
return initials.toUpperCase();
}
diff --git a/static/app/components/metrics/chart/chart.tsx b/static/app/components/metrics/chart/chart.tsx
index e2052fd3f626a9..c238100b511fbe 100644
--- a/static/app/components/metrics/chart/chart.tsx
+++ b/static/app/components/metrics/chart/chart.tsx
@@ -86,7 +86,7 @@ function isNonZeroValue(value: number | null) {
function addSeriesPadding(data: Series['data']) {
const hasNonZeroSibling = (index: number) => {
return (
- isNonZeroValue(data[index - 1]?.value) || isNonZeroValue(data[index + 1]?.value)
+ isNonZeroValue(data[index - 1]!?.value) || isNonZeroValue(data[index + 1]!?.value)
);
};
const paddingIndices = new Set();
@@ -142,9 +142,9 @@ export const MetricChart = memo(
}
});
- const bucketSize = series[0]?.data[1]?.name - series[0]?.data[0]?.name;
+ const bucketSize = series[0]!?.data[1]!?.name - series[0]!?.data[0]!?.name;
const isSubMinuteBucket = bucketSize < 60_000;
- const lastBucketTimestamp = series[0]?.data?.[series[0]?.data?.length - 1]?.name;
+ const lastBucketTimestamp = series[0]!?.data?.[series[0]!?.data?.length - 1]!?.name;
const ingestionBuckets = useMemo(
() => getIngestionDelayBucketCount(bucketSize, lastBucketTimestamp),
[bucketSize, lastBucketTimestamp]
@@ -245,7 +245,7 @@ export const MetricChart = memo(
// Filter padding datapoints from tooltip
if (param.value[1] === 0) {
- const currentSeries = seriesToShow[param.seriesIndex];
+ const currentSeries = seriesToShow[param.seriesIndex]!;
const paddingIndices =
'paddingIndices' in currentSeries
? currentSeries.paddingIndices
diff --git a/static/app/components/metrics/chart/useFocusArea.tsx b/static/app/components/metrics/chart/useFocusArea.tsx
index f381fc3f3b8674..8fbbe03d4682e0 100644
--- a/static/app/components/metrics/chart/useFocusArea.tsx
+++ b/static/app/components/metrics/chart/useFocusArea.tsx
@@ -278,16 +278,16 @@ function FocusAreaOverlay({
return;
}
- const widthPx = bottomRight[0] - topLeft[0];
- const heightPx = bottomRight[1] - topLeft[1];
+ const widthPx = bottomRight[0]! - topLeft[0]!;
+ const heightPx = bottomRight[1]! - topLeft[1]!;
- const resultTop = useFullYAxis ? '0px' : `${topLeft[1].toPrecision(5)}px`;
+ const resultTop = useFullYAxis ? '0px' : `${topLeft[1]!.toPrecision(5)}px`;
const resultHeight = useFullYAxis
? `${CHART_HEIGHT}px`
: `${heightPx.toPrecision(5)}px`;
// Ensure the focus area rect is always within the chart bounds
- const left = Math.max(topLeft[0], 0);
+ const left = Math.max(topLeft[0]!, 0);
const width = Math.min(widthPx, chartInstance.getWidth() - left);
const newPosition = {
@@ -347,14 +347,14 @@ const getSelectionRange = (
useFullYAxis: boolean,
boundingRect: ValueRect
): SelectionRange => {
- const startTimestamp = Math.min(...rect.coordRange[0]);
- const endTimestamp = Math.max(...rect.coordRange[0]);
+ const startTimestamp = Math.min(...rect.coordRange![0]!);
+ const endTimestamp = Math.max(...rect.coordRange![0]!);
const startDate = getDateString(Math.max(startTimestamp, boundingRect.xMin));
const endDate = getDateString(Math.min(endTimestamp, boundingRect.xMax));
- const min = useFullYAxis ? NaN : Math.min(...rect.coordRange[1]);
- const max = useFullYAxis ? NaN : Math.max(...rect.coordRange[1]);
+ const min = useFullYAxis ? NaN : Math.min(...rect.coordRange[1]!);
+ const max = useFullYAxis ? NaN : Math.max(...rect.coordRange[1]!);
return {
start: startDate,
diff --git a/static/app/components/metrics/chart/useMetricChartSamples.tsx b/static/app/components/metrics/chart/useMetricChartSamples.tsx
index 83cf415ba77f96..e7a25ab26796d1 100644
--- a/static/app/components/metrics/chart/useMetricChartSamples.tsx
+++ b/static/app/components/metrics/chart/useMetricChartSamples.tsx
@@ -164,7 +164,10 @@ export function useMetricChartSamples({
const value = getSummaryValueForAggregation(sample.summary, aggregation);
const yValue = value;
- const [xPosition, yPosition] = fitToValueRect(xValue, yValue, valueRect);
+ const [xPosition, yPosition] = fitToValueRect(xValue, yValue, valueRect) as [
+ number,
+ number,
+ ];
return {
seriesName: sample.id,
diff --git a/static/app/components/metrics/chart/useMetricReleases.tsx b/static/app/components/metrics/chart/useMetricReleases.tsx
index 932e9a5c5ab193..2b2b9b03f7f41c 100644
--- a/static/app/components/metrics/chart/useMetricReleases.tsx
+++ b/static/app/components/metrics/chart/useMetricReleases.tsx
@@ -86,7 +86,7 @@ export function useReleases() {
if (pageLinks) {
const paginationObject = parseLinkHeader(pageLinks);
hasMore = paginationObject?.next?.results ?? false;
- queryObj.cursor = paginationObject.next.cursor;
+ queryObj.cursor = paginationObject.next!.cursor;
} else {
hasMore = false;
}
diff --git a/static/app/components/metrics/chart/utils.tsx b/static/app/components/metrics/chart/utils.tsx
index af86decedbdfed..42da326d82ab18 100644
--- a/static/app/components/metrics/chart/utils.tsx
+++ b/static/app/components/metrics/chart/utils.tsx
@@ -53,8 +53,8 @@ export function getValueRect(chartRef?: RefObject): ValueRect {
const xMin = moment(topLeft[0]).valueOf();
const xMax = moment(bottomRight[0]).valueOf();
- const yMin = Math.max(0, bottomRight[1]);
- const yMax = topLeft[1];
+ const yMin = Math.max(0, bottomRight[1]!);
+ const yMax = topLeft[1]!;
return {
xMin,
diff --git a/static/app/components/metrics/customMetricsEventData.spec.tsx b/static/app/components/metrics/customMetricsEventData.spec.tsx
deleted file mode 100644
index 16069063b11d48..00000000000000
--- a/static/app/components/metrics/customMetricsEventData.spec.tsx
+++ /dev/null
@@ -1,317 +0,0 @@
-import {OrganizationFixture} from 'sentry-fixture/organization';
-
-import {render, screen} from 'sentry-test/reactTestingLibrary';
-import {textWithMarkupMatcher} from 'sentry-test/utils';
-
-import type {MetricsSummary} from 'sentry/components/events/interfaces/spans/types';
-import {CustomMetricsEventData} from 'sentry/components/metrics/customMetricsEventData';
-import type {
- MetricsQueryApiResponse,
- MetricsQueryApiResponseLastMeta,
-} from 'sentry/types/metrics';
-
-const organization = OrganizationFixture({features: ['custom-metrics']});
-
-describe('CustomMetricsEventData', () => {
- beforeEach(() => {
- MockApiClient.addMockResponse({
- url: `/organizations/${organization.slug}/metrics/query/`,
- method: 'POST',
- body: {
- data: [[{by: {}, series: [], totals: 2}]],
- meta: [
- [
- {
- unit: 'nanoseconds',
- scaling_factor: 1000000,
- group_bys: {},
- limit: null,
- order: 'asc',
- } as MetricsQueryApiResponseLastMeta,
- ],
- ],
- end: '2023-09-01T01:00:00Z',
- intervals: [],
- start: '2023-09-01T00:00:00Z',
- } satisfies MetricsQueryApiResponse,
- });
- });
-
- it('renders empty (no feature flag)', () => {
- const metricsSummary: MetricsSummary = {
- 'd:custom/my.metric@second': [
- {
- count: 2,
- min: 1,
- max: 2,
- sum: 3,
- tags: {
- foo: 'bar',
- },
- },
- ],
- };
-
- const {container} = render(
-
- );
- expect(container).toBeEmptyDOMElement();
- });
-
- it('renders empty (no data)', () => {
- const {container} = render(
- ,
- {
- organization,
- }
- );
- expect(container).toBeEmptyDOMElement();
- });
-
- it('renders (all information)', () => {
- // This test fails without the mock below, because a nested component uses @container query
- // that is not supported by the version of jsdom currently used by jest.
- jest.spyOn(console, 'error').mockImplementation();
-
- const metricsSummary: MetricsSummary = {
- 'd:custom/my.metric@second': [
- {
- count: 2,
- min: 1,
- max: 2,
- sum: 3,
- tags: {
- foo: 'bar',
- },
- },
- ],
- };
-
- render(
- ,
- {
- organization,
- }
- );
-
- expect(screen.getByText('Custom Metrics')).toBeInTheDocument();
-
- expect(screen.getByText('my.metric')).toBeInTheDocument();
- expect(screen.getByRole('link', {name: 'View Metric'})).toBeInTheDocument();
-
- expect(screen.getByText(textWithMarkupMatcher(/Value: 1\.5s/))).toBeInTheDocument();
-
- expect(screen.getByText(/Tags: foo:bar/)).toBeInTheDocument();
- });
-
- it('renders (counter metric)', () => {
- const metricsSummary: MetricsSummary = {
- 'c:custom/my.metric@second': [
- {
- count: 1,
- min: 1,
- max: 1,
- sum: 1,
- tags: {
- foo: 'bar',
- },
- },
- ],
- };
-
- render(
- ,
- {organization}
- );
-
- expect(screen.getByText('Custom Metrics')).toBeInTheDocument();
-
- expect(screen.getByText('my.metric')).toBeInTheDocument();
- expect(screen.getByRole('link', {name: 'View Metric'})).toBeInTheDocument();
-
- expect(screen.getByText(textWithMarkupMatcher(/Count: 1/))).toBeInTheDocument();
-
- expect(screen.getByText(/Tags: foo:bar/)).toBeInTheDocument();
- });
-
- it('renders (no tags)', async () => {
- const metricsSummary: MetricsSummary = {
- 'c:custom/my.metric@second': [
- {
- count: 1,
- min: 1,
- max: 1,
- sum: 1,
- tags: null,
- },
- ],
- };
-
- render(
- ,
- {
- organization,
- }
- );
-
- expect(screen.getByText('Custom Metrics')).toBeInTheDocument();
-
- expect(screen.getByText('my.metric')).toBeInTheDocument();
- expect(screen.getByRole('link', {name: 'View Metric'})).toBeInTheDocument();
- expect(screen.getByText(textWithMarkupMatcher(/Count: 1/))).toBeInTheDocument();
-
- expect(
- await screen.findByText(textWithMarkupMatcher(/Tags: \(none\)/))
- ).toBeInTheDocument();
- });
-
- it('renders (empty tags)', async () => {
- const metricsSummary: MetricsSummary = {
- 'c:custom/my.metric@second': [
- {
- count: 1,
- min: 1,
- max: 1,
- sum: 1,
- tags: {},
- },
- ],
- };
-
- render(
- ,
- {
- organization,
- }
- );
-
- expect(screen.getByText('Custom Metrics')).toBeInTheDocument();
-
- expect(screen.getByText('my.metric')).toBeInTheDocument();
- expect(screen.getByRole('link', {name: 'View Metric'})).toBeInTheDocument();
- expect(screen.getByText(textWithMarkupMatcher(/Count: 1/))).toBeInTheDocument();
-
- expect(
- await screen.findByText(textWithMarkupMatcher(/Tags: \(none\)/))
- ).toBeInTheDocument();
- });
-
- it('renders (multiple)', () => {
- MockApiClient.addMockResponse({
- url: `/organizations/${organization.slug}/metrics/query/`,
- method: 'POST',
- body: {
- data: [
- [{by: {}, series: [], totals: 2}],
- [{by: {}, series: [], totals: 2}],
- [{by: {}, series: [], totals: 2}],
- ],
- meta: [
- [
- {
- unit: 'nanoseconds',
- scaling_factor: 1000000,
- group_bys: {},
- limit: null,
- order: 'asc',
- } as MetricsQueryApiResponseLastMeta,
- ],
- [
- {
- unit: 'nanoseconds',
- scaling_factor: null,
- group_bys: {},
- limit: null,
- order: 'asc',
- } as MetricsQueryApiResponseLastMeta,
- ],
- [
- {
- unit: 'nanoseconds',
- scaling_factor: 1000000,
- group_bys: {},
- limit: null,
- order: 'asc',
- } as MetricsQueryApiResponseLastMeta,
- ],
- ],
- end: '2023-09-01T01:00:00Z',
- intervals: [],
- start: '2023-09-01T00:00:00Z',
- } satisfies MetricsQueryApiResponse,
- });
-
- const metricsSummary: MetricsSummary = {
- 'd:custom/my.distribution@second': [
- {
- count: 2,
- min: 1,
- max: 2,
- sum: 3,
- tags: {
- foo: 'bar',
- },
- },
- {
- count: 1,
- min: 1,
- max: 1,
- sum: 1,
- tags: null,
- },
- ],
- 'c:custom/my.counter@second': [
- {
- count: 2,
- min: 1,
- max: 2,
- sum: 3,
- tags: {
- foo: 'bar',
- },
- },
- ],
- };
-
- render(
- ,
- {
- organization,
- }
- );
-
- expect(screen.getByText('Custom Metrics')).toBeInTheDocument();
-
- expect(screen.getByText('my.counter')).toBeInTheDocument();
- expect(screen.getAllByText('my.distribution')).toHaveLength(2);
- expect(screen.getAllByRole('link', {name: 'View Metric'})).toHaveLength(3);
- });
-});
diff --git a/static/app/components/metrics/customMetricsEventData.tsx b/static/app/components/metrics/customMetricsEventData.tsx
deleted file mode 100644
index 56f044ae768104..00000000000000
--- a/static/app/components/metrics/customMetricsEventData.tsx
+++ /dev/null
@@ -1,482 +0,0 @@
-import {Fragment, useMemo} from 'react';
-import {useTheme} from '@emotion/react';
-import styled from '@emotion/styled';
-
-import MarkLine from 'sentry/components/charts/components/markLine';
-import ScatterSeries from 'sentry/components/charts/series/scatterSeries';
-import type {
- MetricsSummary,
- MetricsSummaryItem,
-} from 'sentry/components/events/interfaces/spans/types';
-import {Hovercard} from 'sentry/components/hovercard';
-import {KeyValueTable, KeyValueTableRow} from 'sentry/components/keyValueTable';
-import {MetricChart} from 'sentry/components/metrics/chart/chart';
-import type {Series} from 'sentry/components/metrics/chart/types';
-import {normalizeDateTimeString} from 'sentry/components/organizations/pageFilters/parse';
-import {IconInfo} from 'sentry/icons';
-import {t} from 'sentry/locale';
-import {space} from 'sentry/styles/space';
-import type {
- MetricsQueryApiResponseLastMeta,
- MetricType,
- MRI,
-} from 'sentry/types/metrics';
-import type {Organization} from 'sentry/types/organization';
-import {defined} from 'sentry/utils';
-import {getDefaultAggregation, getMetricsUrl} from 'sentry/utils/metrics';
-import {hasCustomMetrics} from 'sentry/utils/metrics/features';
-import {formatMetricUsingUnit} from 'sentry/utils/metrics/formatters';
-import {formatMRI, isExtractedCustomMetric, parseMRI} from 'sentry/utils/metrics/mri';
-import {MetricDisplayType} from 'sentry/utils/metrics/types';
-import {useMetricsQuery} from 'sentry/utils/metrics/useMetricsQuery';
-import {middleEllipsis} from 'sentry/utils/string/middleEllipsis';
-import type {Color} from 'sentry/utils/theme';
-import useOrganization from 'sentry/utils/useOrganization';
-import {getSampleChartSymbol} from 'sentry/views/insights/common/views/spanSummaryPage/sampleList/durationChart/getSampleChartSymbol';
-import {getChartTimeseries} from 'sentry/views/metrics/widget';
-import {
- type SectionCardKeyValueList,
- TraceDrawerComponents,
-} from 'sentry/views/performance/newTraceDetails/traceDrawer/details/styles';
-
-function flattenMetricsSummary(
- metricsSummary: MetricsSummary
-): {item: MetricsSummaryItem; mri: MRI}[] {
- return (
- Object.entries(metricsSummary) as [
- keyof MetricsSummary,
- MetricsSummary[keyof MetricsSummary],
- ][]
- )
- .flatMap(([mri, items]) => (items || []).map(item => ({item, mri})))
- .filter(entry => !isExtractedCustomMetric(entry));
-}
-
-function tagToQuery(tagKey: string, tagValue: string) {
- return `${tagKey}:"${tagValue}"`;
-}
-
-export function eventHasCustomMetrics(
- organization: Organization,
- metricsSummary: MetricsSummary | undefined
-) {
- return (
- hasCustomMetrics(organization) &&
- metricsSummary &&
- flattenMetricsSummary(metricsSummary).length > 0
- );
-}
-
-const HALF_HOUR_IN_MS = 30 * 60 * 1000;
-
-interface DataRow {
- chartUnit: string;
- metricType: MetricType;
- metricUnit: string;
- mri: MRI;
- scalingFactor: number;
- summaryItem: MetricsSummaryItem;
- chartSeries?: Series;
- deviation?: number;
- deviationPercent?: number;
- itemAvg?: number;
- totalAvg?: number;
-}
-
-export function CustomMetricsEventData({
- metricsSummary,
- startTimestamp,
- projectId,
-}: {
- projectId: string;
- startTimestamp: number;
- metricsSummary?: MetricsSummary;
-}) {
- const organization = useOrganization();
-
- const start = new Date(startTimestamp * 1000 - HALF_HOUR_IN_MS).toISOString();
- const end = new Date(startTimestamp * 1000 + HALF_HOUR_IN_MS).toISOString();
-
- const metricsSummaryEntries = useMemo(
- () => (metricsSummary ? flattenMetricsSummary(metricsSummary) : []),
- [metricsSummary]
- );
-
- const queries = useMemo(
- () =>
- metricsSummaryEntries.map((entry, index) => ({
- mri: entry.mri,
- name: index.toString(),
- aggregation: getDefaultAggregation(entry.mri),
- query: Object.entries(entry.item.tags ?? {})
- .map(([tagKey, tagValue]) => tagToQuery(tagKey, tagValue))
- .join(' '),
- })),
- [metricsSummaryEntries]
- );
-
- const {data} = useMetricsQuery(queries, {
- projects: [parseInt(projectId, 10)],
- datetime: {start, end, period: null, utc: true},
- environments: [],
- });
-
- const chartSeries = useMemo(
- () =>
- data
- ? data.data.flatMap((entry, index) => {
- // Splitting the response to treat it like individual requests
- // TODO: improve utils for metric series generation
- return getChartTimeseries(
- {...data, data: [entry], meta: [data.meta[index]]},
- [queries[index]],
- {
- showQuerySymbol: false,
- }
- );
- })
- : [],
- [data, queries]
- );
-
- const dataRows = useMemo(
- () =>
- metricsSummaryEntries
- .map((entry, index) => {
- const entryData = data?.data?.[index][0];
- const dataMeta = data?.meta?.[index];
- const lastMeta = dataMeta?.[
- dataMeta?.length - 1
- ] as MetricsQueryApiResponseLastMeta;
- const parsedMRI = parseMRI(entry.mri);
- const type = parsedMRI?.type || 'c';
- const unit = parsedMRI?.unit || 'none';
- const summaryItem = entry.item;
- const scalingFactor = lastMeta?.scaling_factor || 1;
- const totalAvg = entryData?.totals;
- const itemAvg =
- summaryItem.sum && summaryItem.count
- ? summaryItem.sum / summaryItem.count
- : undefined;
- const deviation =
- itemAvg && totalAvg ? itemAvg - totalAvg / scalingFactor : undefined;
- const deviationPercent =
- deviation && totalAvg ? deviation / (totalAvg / scalingFactor) : undefined;
-
- return {
- mri: entry.mri,
- itemAvg,
- totalAvg,
- scalingFactor,
- chartSeries: chartSeries[index],
- chartUnit: lastMeta?.unit ?? 'none',
- metricType: type,
- metricUnit: unit,
- summaryItem: summaryItem,
- deviation,
- deviationPercent,
- };
- })
- .sort((a, b) => {
- // Counters should be on bottom
- if (a.metricType === 'c' && b.metricType !== 'c') {
- return 1;
- }
-
- if (a.metricType !== 'c' && b.metricType === 'c') {
- return -1;
- }
-
- // Sort by highest absolute deviation
- return Math.abs(b.deviationPercent || 0) - Math.abs(a.deviationPercent || 0);
- }),
- [chartSeries, data?.data, data?.meta, metricsSummaryEntries]
- );
-
- if (!hasCustomMetrics(organization) || metricsSummaryEntries.length === 0) {
- return null;
- }
-
- const items: SectionCardKeyValueList = [];
-
- dataRows.forEach(dataRow => {
- const {mri, summaryItem} = dataRow;
- const name = formatMRI(mri);
- items.push({
- key: `metric-${name}`,
- subject: name,
- value: (
-
- {' '}
-
-
-
-
- }
- linkText={t('View Metric')}
- linkTarget={getMetricsUrl(organization.slug, {
- start: normalizeDateTimeString(start),
- end: normalizeDateTimeString(end),
- interval: '10s',
- widgets: [
- {
- mri: mri,
- displayType: MetricDisplayType.LINE,
- aggregation: getDefaultAggregation(mri),
- query: Object.entries(summaryItem.tags ?? {})
- .map(([tagKey, tagValue]) => tagToQuery(tagKey, tagValue))
- .join(' '),
- },
- ],
- })}
- />
- ),
- });
- });
-
- return (
-
- );
-}
-
-function ValueRenderer({dataRow}: {dataRow: DataRow}) {
- const {mri, summaryItem} = dataRow;
- const parsedMRI = parseMRI(mri);
- const unit = parsedMRI?.unit ?? 'none';
- const type = parsedMRI?.type ?? 'c';
-
- // For counters the other stats offer little value, so we only show the count
- if (type === 'c' || !summaryItem.count) {
- return t('Count: %s', formatMetricUsingUnit(summaryItem.count, 'none'));
- }
-
- const avg = summaryItem.sum && summaryItem.count && summaryItem.sum / summaryItem.count;
-
- return (
-
- {t('Value:')} {formatMetricUsingUnit(avg, unit) ?? t('(none)')}
- {summaryItem.count > 1 && (
-
-
-
-
-
-
-
-
- }
- >
-
-
- )}
-
- );
-}
-
-function DeviationRenderer({
- dataRow,
- startTimestamp,
-}: {
- dataRow: DataRow;
- startTimestamp: number;
-}) {
- const {
- mri,
- totalAvg,
- itemAvg,
- deviation,
- deviationPercent,
- chartUnit,
- chartSeries,
- scalingFactor,
- } = dataRow;
- const theme = useTheme();
- const parsedMRI = parseMRI(mri);
- const type = parsedMRI?.type ?? 'c';
-
- if (
- !defined(totalAvg) ||
- !defined(itemAvg) ||
- !defined(deviation) ||
- !defined(deviationPercent) ||
- type === 'c'
- ) {
- return null;
- }
- const totals = totalAvg / scalingFactor;
- const isPositive = deviation > 0;
- const isNeutral = Math.abs(deviationPercent) < 0.03;
-
- const valueColor: Color = isNeutral ? 'gray300' : isPositive ? 'red300' : 'green300';
-
- const sign = deviation === 0 ? '±' : isPositive ? '+' : '';
- const symbol = isNeutral ? '' : isPositive ? '▲' : '▼';
-
- return (
-
- {`avg(${middleEllipsis(formatMRI(mri), 40, /\.|-|_/)})`}
- {t("Span's start time -/+ 30 min")}
-
- }
- body={
- chartSeries && (
- {
- return `${formatMetricUsingUnit(
- (params.data as any).value[1],
- chartUnit || 'none'
- )}`;
- },
- },
- },
- ],
- ...getSampleChartSymbol(itemAvg, totals, theme),
- symbolSize: 14,
- animation: false,
- silent: true,
- }),
- ]}
- height={160}
- />
- )
- }
- >
-
- {symbol} {sign}
- {formatMetricUsingUnit(deviationPercent * 100, 'percent')}
-
-
- );
-}
-
-const STANDARD_TAGS = ['environment', 'release', 'transaction'];
-
-function TagsRenderer({tags}: {tags: Record | null}) {
- const tagString = Object.entries(tags || {})
- .filter(([tagKey]) => !STANDARD_TAGS.includes(tagKey))
- .reduce((acc, [tagKey, tagValue], index) => {
- if (index > 0) {
- acc += ', ';
- }
- acc += `${tagKey}:${tagValue}`;
- return acc;
- }, '');
-
- if (tagString === '') {
- return (
-
- {t('Tags:')} {t('(none)')}
-
- );
- }
-
- return t('Tags: %s', tagString);
-}
-
-const ChartHovercard = styled(Hovercard)`
- width: 450px;
-`;
-
-const ValueCell = styled('div')`
- display: flex;
- align-items: center;
- font-family: ${p => p.theme.text.familyMono};
-`;
-
-const NoValue = styled('span')`
- color: ${p => p.theme.gray300};
-`;
-
-const ValueWrapper = styled(ValueCell)`
- display: inline-grid;
- grid-template-columns: max-content max-content;
- gap: ${space(1)};
- align-items: center;
-`;
-
-const DeviationValue = styled('span')<{
- textColor: Color;
-}>`
- color: ${p => p.theme[p.textColor]};
- cursor: default;
-`;
-
-const HoverCardHeading = styled('div')`
- font-size: ${p => p.theme.fontSizeLarge};
- padding-bottom: ${space(0.5)};
-`;
-
-const HoverCardSubheading = styled('div')`
- font-size: ${p => p.theme.fontSizeSmall};
- color: ${p => p.theme.subText};
-`;
-
-const ValuesHovercard = styled(Hovercard)`
- width: 200px;
- & .hovercard-body {
- padding: ${space(0.5)};
- }
-`;
-
-const StyledKeyValueTable = styled(KeyValueTable)`
- margin-bottom: 0;
-`;
diff --git a/static/app/components/metrics/metricSamplesTable.tsx b/static/app/components/metrics/metricSamplesTable.tsx
index 0394dbb1a07c28..2e4c24fb4784f6 100644
--- a/static/app/components/metrics/metricSamplesTable.tsx
+++ b/static/app/components/metrics/metricSamplesTable.tsx
@@ -6,6 +6,7 @@ import debounce from 'lodash/debounce';
import {Button, LinkButton} from 'sentry/components/button';
import {Flex} from 'sentry/components/container/flex';
import {CopyToClipboardButton} from 'sentry/components/copyToClipboardButton';
+import SmartSearchBar from 'sentry/components/deprecatedSmartSearchBar';
import EmptyStateWarning from 'sentry/components/emptyStateWarning';
import GridEditable, {
COL_WIDTH_UNDEFINED,
@@ -17,7 +18,6 @@ import ProjectBadge from 'sentry/components/idBadge/projectBadge';
import Link from 'sentry/components/links/link';
import type {SelectionRange} from 'sentry/components/metrics/chart/types';
import PerformanceDuration from 'sentry/components/performanceDuration';
-import SmartSearchBar from 'sentry/components/smartSearchBar';
import {Tooltip} from 'sentry/components/tooltip';
import {IconProfiling} from 'sentry/icons';
import {t} from 'sentry/locale';
diff --git a/static/app/components/metrics/metricSearchBar.tsx b/static/app/components/metrics/metricSearchBar.tsx
index b73d67a407aa78..f336cc2cf5f4f4 100644
--- a/static/app/components/metrics/metricSearchBar.tsx
+++ b/static/app/components/metrics/metricSearchBar.tsx
@@ -1,12 +1,12 @@
import {useCallback, useMemo} from 'react';
import {css, type SerializedStyles} from '@emotion/react';
+import type {SmartSearchBarProps} from 'sentry/components/deprecatedSmartSearchBar';
import {QueryFieldGroup} from 'sentry/components/metrics/queryFieldGroup';
import {
SearchQueryBuilder,
type SearchQueryBuilderProps,
} from 'sentry/components/searchQueryBuilder';
-import type {SmartSearchBarProps} from 'sentry/components/smartSearchBar';
import {t} from 'sentry/locale';
import {SavedSearchType, type TagCollection} from 'sentry/types/group';
import type {MRI} from 'sentry/types/metrics';
diff --git a/static/app/components/metrics/mriSelect/index.spec.tsx b/static/app/components/metrics/mriSelect/index.spec.tsx
index 09b5e850de5b51..ec6500f2cecb84 100644
--- a/static/app/components/metrics/mriSelect/index.spec.tsx
+++ b/static/app/components/metrics/mriSelect/index.spec.tsx
@@ -13,7 +13,7 @@ function createMetricMeta(
operations: [],
projectIds: [],
type: 'd',
- unit: unit,
+ unit,
};
}
diff --git a/static/app/components/metrics/mriSelect/index.tsx b/static/app/components/metrics/mriSelect/index.tsx
index e6123829410d70..7f8346be3141ca 100644
--- a/static/app/components/metrics/mriSelect/index.tsx
+++ b/static/app/components/metrics/mriSelect/index.tsx
@@ -59,7 +59,6 @@ function useMriMode() {
if (mriModeKeyPressed) {
setMriMode(value => !value);
}
- // eslint-disable-next-line react-hooks/exhaustive-deps
}, [mriModeKeyPressed]);
return mriMode;
diff --git a/static/app/components/metrics/queryFieldGroup.tsx b/static/app/components/metrics/queryFieldGroup.tsx
index 2f2cc87d58d740..e7b398753353f4 100644
--- a/static/app/components/metrics/queryFieldGroup.tsx
+++ b/static/app/components/metrics/queryFieldGroup.tsx
@@ -10,9 +10,9 @@ import {
type SelectKey,
type SingleSelectProps,
} from 'sentry/components/compactSelect';
+import _SmartSearchBar from 'sentry/components/deprecatedSmartSearchBar';
import {DebouncedInput as _DebouncedInput} from 'sentry/components/modals/metricWidgetViewerModal/queries';
import {SearchQueryBuilder as _SearchQueryBuilder} from 'sentry/components/searchQueryBuilder';
-import _SmartSearchBar from 'sentry/components/smartSearchBar';
import {Tooltip} from 'sentry/components/tooltip';
import {SLOW_TOOLTIP_DELAY} from 'sentry/constants';
import {IconDelete} from 'sentry/icons';
diff --git a/static/app/components/modals/addTempestCredentialsModal.tsx b/static/app/components/modals/addTempestCredentialsModal.tsx
new file mode 100644
index 00000000000000..e805c692d50701
--- /dev/null
+++ b/static/app/components/modals/addTempestCredentialsModal.tsx
@@ -0,0 +1,37 @@
+import {Fragment} from 'react';
+
+import type {ModalRenderProps} from 'sentry/actionCreators/modal';
+import {t} from 'sentry/locale';
+import type {Organization} from 'sentry/types/organization';
+import type {Project} from 'sentry/types/project';
+import AddTempestCredentialsForm from 'sentry/views/settings/project/tempest/addTempestCredentialsForm';
+import {useFetchTempestCredentials} from 'sentry/views/settings/project/tempest/hooks/useFetchTempestCredentials';
+
+interface Props extends ModalRenderProps {
+ organization: Organization;
+ project: Project;
+}
+
+export default function AddCredentialsModal({Body, Header, ...props}: Props) {
+ const {closeModal, organization, project} = props;
+ const {invalidateCredentialsCache} = useFetchTempestCredentials(organization, project);
+
+ const onSuccess = () => {
+ invalidateCredentialsCache();
+ closeModal();
+ };
+
+ return (
+
+ {t('Add New Credentials')}
+
+
+
+
+ );
+}
diff --git a/static/app/components/modals/commandPalette.spec.tsx b/static/app/components/modals/commandPalette.spec.tsx
index fb752b1d3cecc3..2c7e087e16bd62 100644
--- a/static/app/components/modals/commandPalette.spec.tsx
+++ b/static/app/components/modals/commandPalette.spec.tsx
@@ -113,7 +113,7 @@ describe('Command Palette Modal', function () {
expect(badges[0]).toHaveTextContent('billy-org Dashboard');
expect(badges[1]).toHaveTextContent('billy-org Settings');
- await userEvent.click(badges[0]);
+ await userEvent.click(badges[0]!);
expect(navigateTo).toHaveBeenCalledWith('/billy-org/', expect.anything(), undefined);
});
diff --git a/static/app/components/modals/createDashboardFromMetricsModal.tsx b/static/app/components/modals/createDashboardFromMetricsModal.tsx
index 24afb35194711d..4bb37ad3ab516f 100644
--- a/static/app/components/modals/createDashboardFromMetricsModal.tsx
+++ b/static/app/components/modals/createDashboardFromMetricsModal.tsx
@@ -1,7 +1,6 @@
import {useEffect, useState} from 'react';
import {css} from '@emotion/react';
import styled from '@emotion/styled';
-import type {Location} from 'history';
import {createDashboard, updateDashboard} from 'sentry/actionCreators/dashboards';
import {addErrorMessage, addSuccessMessage} from 'sentry/actionCreators/indicator';
@@ -25,7 +24,6 @@ import {NEW_DASHBOARD_ID} from 'sentry/views/dashboards/widgetBuilder/utils';
import {OrganizationContext} from 'sentry/views/organizationContext';
export type AddToDashboardModalProps = {
- location: Location;
newDashboard: DashboardDetails;
organization: Organization;
router: InjectedRouter;
diff --git a/static/app/components/modals/createTeamModal.spec.tsx b/static/app/components/modals/createTeamModal.spec.tsx
index b986e7aa5759d7..ec45ac444744b9 100644
--- a/static/app/components/modals/createTeamModal.spec.tsx
+++ b/static/app/components/modals/createTeamModal.spec.tsx
@@ -21,7 +21,7 @@ describe('CreateTeamModal', function () {
});
it('calls createTeam action creator on submit', async function () {
- const styledWrapper = styled(c => c.children);
+ const styledWrapper = styled((c: {children: React.ReactNode}) => c.children);
render(
{props.children}
;
const api = new MockApiClient();
-function renderModal({initialData, widget}) {
+function renderModal({
+ initialData,
+ widget,
+}: {
+ initialData: ReturnType;
+ widget: Widget;
+}) {
return render(
AddDashboardWidgetModal', function () {
router: {},
projects: [],
});
- let mockQuery;
- let mockWidget;
+ let mockQuery!: Widget['queries'][number];
+ let mockWidget!: Widget;
beforeEach(function () {
mockQuery = {
@@ -41,7 +48,6 @@ describe('Modals -> AddDashboardWidgetModal', function () {
fields: ['count()', 'failure_count()'],
aggregates: ['count()', 'failure_count()'],
columns: [],
- id: '1',
name: 'Query Name',
orderby: '',
};
@@ -93,12 +99,10 @@ describe('Modals -> AddDashboardWidgetModal', function () {
mockWidget.queries.push({
...mockQuery,
conditions: 'title:/organizations/:orgId/performance/',
- id: '2',
});
mockWidget.queries.push({
...mockQuery,
conditions: 'title:/organizations/:orgId/',
- id: '3',
});
renderModal({initialData, widget: mockWidget});
const queryFields = screen.getAllByRole('textbox');
diff --git a/static/app/components/modals/dashboardWidgetQuerySelectorModal.tsx b/static/app/components/modals/dashboardWidgetQuerySelectorModal.tsx
index 5b725103ca7671..d1ece1c1f00853 100644
--- a/static/app/components/modals/dashboardWidgetQuerySelectorModal.tsx
+++ b/static/app/components/modals/dashboardWidgetQuerySelectorModal.tsx
@@ -1,4 +1,4 @@
-import {Component, Fragment} from 'react';
+import {Fragment} from 'react';
import {css} from '@emotion/react';
import styled from '@emotion/styled';
@@ -31,9 +31,10 @@ type Props = ModalRenderProps &
selection: PageFilters;
};
-class DashboardWidgetQuerySelectorModal extends Component {
- renderQueries() {
- const {organization, widget, selection, isMetricsData} = this.props;
+function DashboardWidgetQuerySelectorModal(props: Props) {
+ const {organization, widget, selection, isMetricsData, Body, Header} = props;
+
+ const renderQueries = () => {
const querySearchBars = widget.queries.map((query, index) => {
const discoverLocation = getWidgetDiscoverUrl(
{
@@ -72,26 +73,23 @@ class DashboardWidgetQuerySelectorModal extends Component {
);
});
return querySearchBars;
- }
+ };
- render() {
- const {Body, Header, widget} = this.props;
- return (
-
-
-
-
- {t(
- 'Multiple queries were used to create this widget visualization. Which query would you like to view in Discover?'
- )}
-
- {this.renderQueries()}
-
-
- );
- }
+ return (
+
+
+
+
+ {t(
+ 'Multiple queries were used to create this widget visualization. Which query would you like to view in Discover?'
+ )}
+
+ {renderQueries()}
+
+
+ );
}
const StyledInput = styled(Input)`
diff --git a/static/app/components/modals/featureTourModal.spec.tsx b/static/app/components/modals/featureTourModal.spec.tsx
index 35a9ffd7c34951..7c2764c4fbb0c7 100644
--- a/static/app/components/modals/featureTourModal.spec.tsx
+++ b/static/app/components/modals/featureTourModal.spec.tsx
@@ -21,7 +21,8 @@ const steps = [
];
describe('FeatureTourModal', function () {
- let onAdvance, onCloseModal;
+ let onAdvance!: jest.Mock;
+ let onCloseModal!: jest.Mock;
const createWrapper = (props = {}) =>
render(
@@ -70,13 +71,13 @@ describe('FeatureTourModal', function () {
await clickModal();
// Should start on the first step.
- expect(screen.getByRole('heading')).toHaveTextContent(steps[0].title);
+ expect(screen.getByRole('heading')).toHaveTextContent(steps[0]!.title);
// Advance to the next step.
await userEvent.click(screen.getByRole('button', {name: 'Next'}));
// Should move to next step.
- expect(screen.getByRole('heading')).toHaveTextContent(steps[1].title);
+ expect(screen.getByRole('heading')).toHaveTextContent(steps[1]!.title);
expect(onAdvance).toHaveBeenCalled();
});
@@ -86,7 +87,7 @@ describe('FeatureTourModal', function () {
await clickModal();
// Should show title, image and actions
- expect(screen.getByRole('heading')).toHaveTextContent(steps[0].title);
+ expect(screen.getByRole('heading')).toHaveTextContent(steps[0]!.title);
expect(screen.getByTestId('step-image')).toBeInTheDocument();
expect(screen.getByTestId('step-action')).toBeInTheDocument();
expect(screen.getByText('1 of 2')).toBeInTheDocument();
diff --git a/static/app/components/modals/featureTourModal.tsx b/static/app/components/modals/featureTourModal.tsx
index e0d07789f4356b..960f64fcc6329b 100644
--- a/static/app/components/modals/featureTourModal.tsx
+++ b/static/app/components/modals/featureTourModal.tsx
@@ -156,7 +156,8 @@ class ModalContents extends Component {
const {Body, steps, doneText, doneUrl, closeModal} = this.props;
const {current} = this.state;
- const step = steps[current] !== undefined ? steps[current] : steps[steps.length - 1];
+ const step =
+ steps[current] !== undefined ? steps[current]! : steps[steps.length - 1]!;
const hasNext = steps[current + 1] !== undefined;
return (
diff --git a/static/app/components/modals/importDashboardFromFileModal.tsx b/static/app/components/modals/importDashboardFromFileModal.tsx
index 2d28cc02a579a7..adb8fc4d9f5417 100644
--- a/static/app/components/modals/importDashboardFromFileModal.tsx
+++ b/static/app/components/modals/importDashboardFromFileModal.tsx
@@ -1,12 +1,16 @@
import {Fragment, useState} from 'react';
import {css} from '@emotion/react';
+import type {Location} from 'history';
import {createDashboard} from 'sentry/actionCreators/dashboards';
import {addErrorMessage, addSuccessMessage} from 'sentry/actionCreators/indicator';
+import type {ModalRenderProps} from 'sentry/actionCreators/modal';
+import type {Client} from 'sentry/api';
import {Button} from 'sentry/components/button';
import {CodeSnippet} from 'sentry/components/codeSnippet';
import {IconUpload} from 'sentry/icons';
import {t} from 'sentry/locale';
+import type {Organization} from 'sentry/types/organization';
import {browserHistory} from 'sentry/utils/browserHistory';
import normalizeUrl from 'sentry/utils/url/normalizeUrl';
import {
@@ -15,6 +19,12 @@ import {
} from 'sentry/views/dashboards/layoutUtils';
import {Wrapper} from 'sentry/views/discover/table/quickContext/styles';
+export interface ImportDashboardFromFileModalProps {
+ api: Client;
+ location: Location;
+ organization: Organization;
+}
+
// Internal feature - no specs written.
function ImportDashboardFromFileModal({
Header,
@@ -23,11 +33,11 @@ function ImportDashboardFromFileModal({
organization,
api,
location,
-}) {
+}: ModalRenderProps & ImportDashboardFromFileModalProps) {
const [dashboardData, setDashboardData] = useState('');
const [validated, setValidated] = useState(false);
- function validateFile(fileToUpload) {
+ function validateFile(fileToUpload: File) {
if (!fileToUpload || !(fileToUpload.type === 'application/json')) {
addErrorMessage('You must upload a JSON file');
setValidated(false);
diff --git a/static/app/components/modals/inviteMembersModal/index.spec.tsx b/static/app/components/modals/inviteMembersModal/index.spec.tsx
index 49a3e7f58aec2c..1953f0949d0837 100644
--- a/static/app/components/modals/inviteMembersModal/index.spec.tsx
+++ b/static/app/components/modals/inviteMembersModal/index.spec.tsx
@@ -15,7 +15,7 @@ import type {Scope} from 'sentry/types/core';
import type {DetailedTeam} from 'sentry/types/organization';
describe('InviteMembersModal', function () {
- const styledWrapper = styled(c => c.children);
+ const styledWrapper = styled((c: {children: React.ReactNode}) => c.children);
type MockApiResponseFn = (
client: typeof MockApiClient,
@@ -39,7 +39,7 @@ describe('InviteMembersModal', function () {
const defaultMockModalProps = {
Body: styledWrapper(),
- Header: p => {p.children} ,
+ Header: (p: {children?: React.ReactNode}) => {p.children} ,
Footer: styledWrapper(),
closeModal: () => {},
CloseButton: makeCloseButton(() => {}),
@@ -96,12 +96,12 @@ describe('InviteMembersModal', function () {
const emailInputs = screen.getAllByRole('textbox', {name: 'Email Addresses'});
const roleInputs = screen.getAllByRole('textbox', {name: 'Role'});
- await userEvent.type(emailInputs[0], 'test1@test.com');
+ await userEvent.type(emailInputs[0]!, 'test1@test.com');
await userEvent.tab();
- await selectEvent.select(roleInputs[0], 'Admin');
+ await selectEvent.select(roleInputs[0]!, 'Admin');
- await userEvent.type(emailInputs[1], 'test2@test.com');
+ await userEvent.type(emailInputs[1]!, 'test2@test.com');
await userEvent.tab();
};
@@ -162,10 +162,10 @@ describe('InviteMembersModal', function () {
await userEvent.click(screen.getByRole('button', {name: 'Add another'}));
const emailInputs = screen.getAllByRole('textbox', {name: 'Email Addresses'});
- await userEvent.type(emailInputs[0], 'test@test.com');
+ await userEvent.type(emailInputs[0]!, 'test@test.com');
await userEvent.tab();
- await userEvent.type(emailInputs[1], 'test@test.com');
+ await userEvent.type(emailInputs[1]!, 'test@test.com');
await userEvent.tab();
expect(screen.getByText('Duplicate emails between invite rows.')).toBeInTheDocument();
@@ -211,8 +211,8 @@ describe('InviteMembersModal', function () {
await setupMemberInviteState();
const teamInputs = screen.getAllByRole('textbox', {name: 'Add to Team'});
- await selectEvent.select(teamInputs[0], '#team-slug');
- await selectEvent.select(teamInputs[1], '#team-slug');
+ await selectEvent.select(teamInputs[0]!, '#team-slug');
+ await selectEvent.select(teamInputs[1]!, '#team-slug');
await userEvent.click(screen.getByRole('button', {name: 'Send invites (2)'}));
@@ -297,7 +297,11 @@ describe('InviteMembersModal', function () {
});
it('marks failed invites', async function () {
- const failedCreateMemberMock = (client, orgSlug, _) => {
+ const failedCreateMemberMock = (
+ client: typeof MockApiClient,
+ orgSlug: string,
+ _: any
+ ) => {
return client.addMockResponse({
url: `/organizations/${orgSlug}/members/`,
method: 'POST',
@@ -402,7 +406,11 @@ describe('InviteMembersModal', function () {
});
it('POSTS to the invite-request endpoint', async function () {
- const createInviteRequestMock = (client, orgSlug, _) => {
+ const createInviteRequestMock = (
+ client: typeof MockApiClient,
+ orgSlug: string,
+ _: any
+ ) => {
return client.addMockResponse({
url: `/organizations/${orgSlug}/invite-requests/`,
method: 'POST',
diff --git a/static/app/components/modals/inviteMembersModal/index.tsx b/static/app/components/modals/inviteMembersModal/index.tsx
index d32a3f20e192c5..e50f4c5246a6fb 100644
--- a/static/app/components/modals/inviteMembersModal/index.tsx
+++ b/static/app/components/modals/inviteMembersModal/index.tsx
@@ -100,7 +100,7 @@ function InviteMembersModal({
sendInvites: inviteModalSendInvites,
reset,
inviteStatus,
- pendingInvites: pendingInvites[0],
+ pendingInvites: pendingInvites[0]!,
sendingInvites,
complete,
error,
diff --git a/static/app/components/modals/inviteMembersModal/inviteRowControl.tsx b/static/app/components/modals/inviteMembersModal/inviteRowControl.tsx
index cd73db279ef038..dc524d6bfbfdef 100644
--- a/static/app/components/modals/inviteMembersModal/inviteRowControl.tsx
+++ b/static/app/components/modals/inviteMembersModal/inviteRowControl.tsx
@@ -96,7 +96,7 @@ function InviteRowControl({
value={emails}
components={{
MultiValue: (props: MultiValueProps) => (
-
+
),
DropdownIndicator: () => null,
}}
diff --git a/static/app/components/modals/inviteMembersModal/inviteRowControlNew.tsx b/static/app/components/modals/inviteMembersModal/inviteRowControlNew.tsx
index 4326a144ba9a8f..61aebc3da2ca18 100644
--- a/static/app/components/modals/inviteMembersModal/inviteRowControlNew.tsx
+++ b/static/app/components/modals/inviteMembersModal/inviteRowControlNew.tsx
@@ -94,7 +94,7 @@ function InviteRowControl({roleDisabledUnallowed, roleOptions}: Props) {
value={emails}
components={{
MultiValue: (props: MultiValueProps) => (
-
+
),
DropdownIndicator: () => null,
}}
diff --git a/static/app/components/modals/inviteMembersModal/useInviteModal.tsx b/static/app/components/modals/inviteMembersModal/useInviteModal.tsx
index 3c5338b4534d90..98bd2faceee03b 100644
--- a/static/app/components/modals/inviteMembersModal/useInviteModal.tsx
+++ b/static/app/components/modals/inviteMembersModal/useInviteModal.tsx
@@ -29,9 +29,7 @@ function defaultInvite(): InviteRow {
function canInvite(organization: Organization) {
return (
organization.access?.includes('member:write') ||
- (organization.features.includes('members-invite-teammates') &&
- organization.allowMemberInvite &&
- organization.access?.includes('member:invite'))
+ (organization.allowMemberInvite && organization.access?.includes('member:invite'))
);
}
@@ -165,7 +163,7 @@ export default function useInviteModal({organization, initialData, source}: Prop
const removeSentInvites = useCallback(() => {
setState(prev => {
- const emails = prev.pendingInvites[0].emails;
+ const emails = prev.pendingInvites[0]!.emails;
const filteredEmails = Array.from(emails).filter(
email => !prev.inviteStatus[email]?.sent
);
@@ -173,7 +171,7 @@ export default function useInviteModal({organization, initialData, source}: Prop
...prev,
pendingInvites: [
{
- ...prev.pendingInvites[0],
+ ...prev.pendingInvites[0]!,
emails: new Set(filteredEmails),
},
],
@@ -220,7 +218,7 @@ export default function useInviteModal({organization, initialData, source}: Prop
const setEmails = useCallback((emails: string[], index: number) => {
setState(prev => {
const pendingInvites = [...prev.pendingInvites];
- pendingInvites[index] = {...pendingInvites[index], emails: new Set(emails)};
+ pendingInvites[index] = {...pendingInvites[index]!, emails: new Set(emails)};
return {...prev, pendingInvites};
});
@@ -229,7 +227,7 @@ export default function useInviteModal({organization, initialData, source}: Prop
const setTeams = useCallback((teams: string[], index: number) => {
setState(prev => {
const pendingInvites = [...prev.pendingInvites];
- pendingInvites[index] = {...pendingInvites[index], teams: new Set(teams)};
+ pendingInvites[index] = {...pendingInvites[index]!, teams: new Set(teams)};
return {...prev, pendingInvites};
});
@@ -238,7 +236,7 @@ export default function useInviteModal({organization, initialData, source}: Prop
const setRole = useCallback((role: string, index: number) => {
setState(prev => {
const pendingInvites = [...prev.pendingInvites];
- pendingInvites[index] = {...pendingInvites[index], role};
+ pendingInvites[index] = {...pendingInvites[index]!, role};
return {...prev, pendingInvites};
});
diff --git a/static/app/components/modals/inviteMissingMembersModal/index.spec.tsx b/static/app/components/modals/inviteMissingMembersModal/index.spec.tsx
index 8a462fcfbe2316..620f8a7b72d3bf 100644
--- a/static/app/components/modals/inviteMissingMembersModal/index.spec.tsx
+++ b/static/app/components/modals/inviteMissingMembersModal/index.spec.tsx
@@ -188,12 +188,12 @@ describe('InviteMissingMembersModal', function () {
const teamInputs = screen.getAllByRole('textbox', {name: 'Add to Team'});
await userEvent.click(screen.getByLabelText('Select hello@sentry.io'));
- await selectEvent.select(roleInputs[0], 'Admin', {
+ await selectEvent.select(roleInputs[0]!, 'Admin', {
container: document.body,
});
await userEvent.click(screen.getByLabelText('Select abcd@sentry.io'));
- await selectEvent.select(teamInputs[1], '#team-slug', {
+ await selectEvent.select(teamInputs[1]!, '#team-slug', {
container: document.body,
});
diff --git a/static/app/components/modals/inviteMissingMembersModal/index.tsx b/static/app/components/modals/inviteMissingMembersModal/index.tsx
index 9c89de5ec73a09..4ba3e48f86a8b7 100644
--- a/static/app/components/modals/inviteMissingMembersModal/index.tsx
+++ b/static/app/components/modals/inviteMissingMembersModal/index.tsx
@@ -67,9 +67,9 @@ export function InviteMissingMembersModal({
(role: string, index: number) => {
setMemberInvites(prevInvites => {
const invites = prevInvites.map(i => ({...i}));
- invites[index].role = role;
- if (!allowedRolesMap[role].isTeamRolesAllowed) {
- invites[index].teamSlugs = new Set([]);
+ invites[index]!.role = role;
+ if (!allowedRolesMap[role]!.isTeamRolesAllowed) {
+ invites[index]!.teamSlugs = new Set([]);
}
return invites;
});
@@ -80,7 +80,7 @@ export function InviteMissingMembersModal({
const setTeams = useCallback((teamSlugs: string[], index: number) => {
setMemberInvites(prevInvites => {
const invites = prevInvites.map(i => ({...i}));
- invites[index].teamSlugs = new Set(teamSlugs);
+ invites[index]!.teamSlugs = new Set(teamSlugs);
return invites;
});
}, []);
@@ -96,7 +96,7 @@ export function InviteMissingMembersModal({
const toggleCheckbox = useCallback(
(checked: boolean, index: number) => {
const selectedMembers = [...memberInvites];
- selectedMembers[index].selected = checked;
+ selectedMembers[index]!.selected = checked;
setMemberInvites(selectedMembers);
},
[memberInvites]
@@ -234,7 +234,7 @@ export function InviteMissingMembersModal({
stickyHeaders
>
{memberInvites?.map((member, i) => {
- const checked = memberInvites[i].selected;
+ const checked = memberInvites[i]!.selected;
const username = member.externalId.split(':').pop();
const isTeamRolesAllowed =
allowedRolesMap[member.role]?.isTeamRolesAllowed ?? true;
diff --git a/static/app/components/modals/metricWidgetViewerModal.tsx b/static/app/components/modals/metricWidgetViewerModal.tsx
index c3bb40feeedba8..960085fb4c27ce 100644
--- a/static/app/components/modals/metricWidgetViewerModal.tsx
+++ b/static/app/components/modals/metricWidgetViewerModal.tsx
@@ -120,7 +120,7 @@ function MetricWidgetViewerModal({
if (!updatedQuery.alias) {
updatedQuery.alias = updatedAlias;
}
- if (isVirtualAlias(currentQuery.alias) && isVirtualAlias(updatedQuery.alias)) {
+ if (isVirtualAlias(currentQuery!.alias) && isVirtualAlias(updatedQuery.alias)) {
updatedQuery.alias = updatedAlias;
}
}
@@ -182,7 +182,7 @@ function MetricWidgetViewerModal({
? curr.map(q => ({...q, isHidden: true}))
: curr),
{
- ...query,
+ ...query!,
id: generateQueryId(),
},
];
@@ -241,7 +241,7 @@ function MetricWidgetViewerModal({
updated.splice(index, 1);
// Make sure the last query is visible for big number widgets
if (displayType === DisplayType.BIG_NUMBER && filteredEquations.length === 0) {
- updated[updated.length - 1].isHidden = false;
+ updated[updated.length - 1]!.isHidden = false;
}
return updated;
});
@@ -308,7 +308,7 @@ function MetricWidgetViewerModal({
closeModal();
}, [userHasModified, closeModal, organization]);
- const {mri, aggregation, query, condition} = metricQueries[0];
+ const {mri, aggregation, query, condition} = metricQueries[0]!;
if (isLoading) {
return ;
diff --git a/static/app/components/modals/metricWidgetViewerModal/queries.tsx b/static/app/components/modals/metricWidgetViewerModal/queries.tsx
index f17bc9d99b367c..08a88d7135d61d 100644
--- a/static/app/components/modals/metricWidgetViewerModal/queries.tsx
+++ b/static/app/components/modals/metricWidgetViewerModal/queries.tsx
@@ -86,7 +86,7 @@ export const Queries = memo(function Queries({
const handleEditQueryAlias = useCallback(
(index: number) => {
- const query = metricQueries[index];
+ const query = metricQueries[index]!;
const alias = getMetricQueryName(query);
onQueryChange({alias}, index);
@@ -96,7 +96,7 @@ export const Queries = memo(function Queries({
const handleEditEquationAlias = useCallback(
(index: number) => {
- const equation = metricEquations[index];
+ const equation = metricEquations[index]!;
const alias = getMetricQueryName(equation);
onEquationChange({alias: alias ?? ''}, index);
diff --git a/static/app/components/modals/metricWidgetViewerModal/visualization.tsx b/static/app/components/modals/metricWidgetViewerModal/visualization.tsx
index c8501a6cb5b123..c5b7fab35b396d 100644
--- a/static/app/components/modals/metricWidgetViewerModal/visualization.tsx
+++ b/static/app/components/modals/metricWidgetViewerModal/visualization.tsx
@@ -97,7 +97,7 @@ function useFocusedSeries({
const setSeriesVisibility = useCallback(
(series: FocusedMetricsSeries) => {
onChange?.();
- if (focusedSeries?.length === 1 && focusedSeries[0].id === series.id) {
+ if (focusedSeries?.length === 1 && focusedSeries[0]!.id === series.id) {
setFocusedSeries([]);
return;
}
diff --git a/static/app/components/modals/sentryAppPublishRequestModal.tsx b/static/app/components/modals/sentryAppPublishRequestModal.tsx
index 786ab5c2f00acd..c549bb05e48a1c 100644
--- a/static/app/components/modals/sentryAppPublishRequestModal.tsx
+++ b/static/app/components/modals/sentryAppPublishRequestModal.tsx
@@ -1,4 +1,4 @@
-import {Component, Fragment} from 'react';
+import {Fragment, useState} from 'react';
import styled from '@emotion/styled';
import intersection from 'lodash/intersection';
@@ -59,11 +59,11 @@ type Props = ModalRenderProps & {
app: SentryApp;
};
-export default class SentryAppPublishRequestModal extends Component {
- form = new FormModel({transformData});
+export default function SentryAppPublishRequestModal(props: Props) {
+ const [form] = useState(() => new FormModel({transformData}));
+ const {app, closeModal, Header, Body} = props;
- get formFields() {
- const {app} = this.props;
+ const formFields = () => {
const permissions = getPermissionSelectionsFromScopes(app.scopes);
const permissionQuestionBaseText =
@@ -132,57 +132,54 @@ export default class SentryAppPublishRequestModal extends Component {
}
return baseFields;
- }
+ };
- handleSubmitSuccess = () => {
- addSuccessMessage(t('Request to publish %s successful.', this.props.app.slug));
- this.props.closeModal();
+ const handleSubmitSuccess = () => {
+ addSuccessMessage(t('Request to publish %s successful.', app.slug));
+ closeModal();
};
- handleSubmitError = err => {
+ const handleSubmitError = err => {
addErrorMessage(
tct('Request to publish [app] fails. [detail]', {
- app: this.props.app.slug,
+ app: app.slug,
detail: err?.responseJSON?.detail,
})
);
};
- render() {
- const {app, Header, Body} = this.props;
- const endpoint = `/sentry-apps/${app.slug}/publish-request/`;
- const forms = [
- {
- title: t('Questions to answer'),
- fields: this.formFields,
- },
- ];
- return (
-
- {t('Publish Request Questionnaire')}
-
-
- {t(
- `Please fill out this questionnaire in order to get your integration evaluated for publication.
+ const endpoint = `/sentry-apps/${app.slug}/publish-request/`;
+ const forms = [
+ {
+ title: t('Questions to answer'),
+ fields: formFields(),
+ },
+ ];
+ return (
+
+ {t('Publish Request Questionnaire')}
+
+
+ {t(
+ `Please fill out this questionnaire in order to get your integration evaluated for publication.
Once your integration has been approved, users outside of your organization will be able to install it.`
- )}
-
-
-
-
- );
- }
+ )}
+
+
+
+
+ );
}
const Explanation = styled('div')`
diff --git a/static/app/components/modals/suggestProjectModal.tsx b/static/app/components/modals/suggestProjectModal.tsx
index 5afee43327c48a..1afbde062081a9 100644
--- a/static/app/components/modals/suggestProjectModal.tsx
+++ b/static/app/components/modals/suggestProjectModal.tsx
@@ -1,4 +1,4 @@
-import {Component, Fragment} from 'react';
+import {Fragment, useState} from 'react';
import styled from '@emotion/styled';
import * as qs from 'query-string';
@@ -30,38 +30,30 @@ type Props = ModalRenderProps & {
organization: Organization;
};
-type State = {
- askTeammate: boolean;
-};
-
-class SuggestProjectModal extends Component {
- state: State = {
- askTeammate: false,
- };
+function SuggestProjectModal(props: Props) {
+ const [askTeammate, setAskTeammate] = useState(false);
+ const {matchedUserAgentString, organization, closeModal, Body, Header, Footer} = props;
- handleGetStartedClick = () => {
- const {matchedUserAgentString, organization} = this.props;
+ const handleGetStartedClick = () => {
trackAnalytics('growth.clicked_mobile_prompt_setup_project', {
matchedUserAgentString,
organization,
});
};
- handleAskTeammate = () => {
- const {matchedUserAgentString, organization} = this.props;
- this.setState({askTeammate: true});
+ const handleAskTeammate = () => {
+ setAskTeammate(true);
trackAnalytics('growth.clicked_mobile_prompt_ask_teammate', {
matchedUserAgentString,
organization,
});
};
- goBack = () => {
- this.setState({askTeammate: false});
+ const goBack = () => {
+ setAskTeammate(false);
};
- handleSubmitSuccess = () => {
- const {matchedUserAgentString, organization, closeModal} = this.props;
+ const handleSubmitSuccess = () => {
addSuccessMessage('Notified teammate successfully');
trackAnalytics('growth.submitted_mobile_prompt_ask_teammate', {
matchedUserAgentString,
@@ -70,28 +62,27 @@ class SuggestProjectModal extends Component {
closeModal();
};
- handlePreSubmit = () => {
+ const handlePreSubmit = () => {
addLoadingMessage(t('Submitting\u2026'));
};
- handleSubmitError = () => {
+ const handleSubmitError = () => {
addErrorMessage(t('Error notifying teammate'));
};
- renderAskTeammate() {
- const {Body, organization} = this.props;
+ const renderAskTeammate = () => {
return (
);
- }
-
- renderMain() {
- const {Body, Footer, organization} = this.props;
+ };
+ const renderMain = () => {
const paramString = qs.stringify({
referrer: 'suggest_project',
category: 'mobile',
@@ -167,14 +156,14 @@ class SuggestProjectModal extends Component {
{t('Tell a Teammate')}
{hasAccess && (
{t('Get Started')}
@@ -186,22 +175,18 @@ class SuggestProjectModal extends Component {
);
- }
+ };
- render() {
- const {Header} = this.props;
- const {askTeammate} = this.state;
- const header = askTeammate ? t('Tell a Teammate') : t('Try Sentry for Mobile');
- return (
-
-
- {this.state.askTeammate ? this.renderAskTeammate() : this.renderMain()}
-
- );
- }
+ const header = askTeammate ? t('Tell a Teammate') : t('Try Sentry for Mobile');
+ return (
+
+
+ {askTeammate ? renderAskTeammate() : renderMain()}
+
+ );
}
const ModalContainer = styled('div')`
diff --git a/static/app/components/modals/teamAccessRequestModal.tsx b/static/app/components/modals/teamAccessRequestModal.tsx
index 0c78b751a3481e..5baa228a9eb590 100644
--- a/static/app/components/modals/teamAccessRequestModal.tsx
+++ b/static/app/components/modals/teamAccessRequestModal.tsx
@@ -1,4 +1,4 @@
-import {Component, Fragment} from 'react';
+import {Fragment, useState} from 'react';
import styled from '@emotion/styled';
import {addErrorMessage, addSuccessMessage} from 'sentry/actionCreators/indicator';
@@ -21,22 +21,12 @@ export interface CreateTeamAccessRequestModalProps
teamId: string;
}
-type State = {
- createBusy: boolean;
-};
+function CreateTeamAccessRequestModal(props: CreateTeamAccessRequestModalProps) {
+ const [createBusy, setCreateBusy] = useState(false);
+ const {api, memberId, orgId, teamId, closeModal, Body, Footer} = props;
-class CreateTeamAccessRequestModal extends Component<
- CreateTeamAccessRequestModalProps,
- State
-> {
- state: State = {
- createBusy: false,
- };
-
- handleClick = async () => {
- const {api, memberId, orgId, teamId, closeModal} = this.props;
-
- this.setState({createBusy: true});
+ const handleClick = async () => {
+ setCreateBusy(true);
try {
await api.requestPromise(
@@ -49,37 +39,28 @@ class CreateTeamAccessRequestModal extends Component<
} catch (err) {
addErrorMessage(t('Unable to send team request'));
}
- this.setState({createBusy: false});
+ setCreateBusy(false);
closeModal();
};
- render() {
- const {Body, Footer, closeModal, teamId} = this.props;
-
- return (
-
-
- {tct(
- 'You do not have permission to add members to the #[team] team, but we will send a request to your organization admins for approval.',
- {team: teamId}
- )}
-
-
-
- {t('Cancel')}
-
- {t('Continue')}
-
-
-
-
- );
- }
+ return (
+
+
+ {tct(
+ 'You do not have permission to add members to the #[team] team, but we will send a request to your organization admins for approval.',
+ {team: teamId}
+ )}
+
+
+
+ {t('Cancel')}
+
+ {t('Continue')}
+
+
+
+
+ );
}
const ButtonGroup = styled('div')`
diff --git a/static/app/components/modals/widgetBuilder/addToDashboardModal.spec.tsx b/static/app/components/modals/widgetBuilder/addToDashboardModal.spec.tsx
index 94543d87695743..e784b084790a7a 100644
--- a/static/app/components/modals/widgetBuilder/addToDashboardModal.spec.tsx
+++ b/static/app/components/modals/widgetBuilder/addToDashboardModal.spec.tsx
@@ -34,8 +34,8 @@ const mockWidgetAsQueryParams = {
};
describe('add to dashboard modal', () => {
- let eventsStatsMock;
- let initialData;
+ let eventsStatsMock!: jest.Mock;
+ let initialData!: ReturnType;
const testDashboardListItem: DashboardListItem = {
id: '1',
diff --git a/static/app/components/modals/widgetBuilder/addToDashboardModal.tsx b/static/app/components/modals/widgetBuilder/addToDashboardModal.tsx
index 7da42feb17ae34..0de52ea3a08f65 100644
--- a/static/app/components/modals/widgetBuilder/addToDashboardModal.tsx
+++ b/static/app/components/modals/widgetBuilder/addToDashboardModal.tsx
@@ -171,11 +171,11 @@ function AddToDashboardModal({
return;
}
- let orderby = widget.queries[0].orderby;
- if (!(DisplayType.AREA && widget.queries[0].columns.length)) {
+ let orderby = widget.queries[0]!.orderby;
+ if (!(DisplayType.AREA && widget.queries[0]!.columns.length)) {
orderby = ''; // Clear orderby if its not a top n visualization.
}
- const query = widget.queries[0];
+ const query = widget.queries[0]!;
const title =
// Metric widgets have their default title derived from the query
@@ -278,7 +278,7 @@ function AddToDashboardModal({
diff --git a/static/app/components/modals/widgetViewerModal.spec.tsx b/static/app/components/modals/widgetViewerModal.spec.tsx
index 8edf9f12b1edd4..cfd269086ed56f 100644
--- a/static/app/components/modals/widgetViewerModal.spec.tsx
+++ b/static/app/components/modals/widgetViewerModal.spec.tsx
@@ -96,8 +96,9 @@ async function renderModal({
}
describe('Modals -> WidgetViewerModal', function () {
- let initialData, initialDataWithFlag;
- let widgetLegendState: WidgetLegendSelectionState;
+ let initialData!: ReturnType;
+ let initialDataWithFlag!: ReturnType;
+ let widgetLegendState!: WidgetLegendSelectionState;
beforeEach(() => {
initialData = initializeOrg({
organization: {
@@ -442,71 +443,6 @@ describe('Modals -> WidgetViewerModal', function () {
);
});
- it('renders widget chart minimap', async function () {
- initialData.organization.features.push('widget-viewer-modal-minimap');
- mockEvents();
- await renderModal({
- initialData,
- widget: {
- ...mockWidget,
- queries: [{...mockQuery, name: ''}, additionalMockQuery],
- },
- });
-
- expect(ReactEchartsCore).toHaveBeenLastCalledWith(
- expect.objectContaining({
- option: expect.objectContaining({
- dataZoom: expect.arrayContaining([
- expect.objectContaining({
- realtime: false,
- showDetail: false,
- end: 100,
- start: 0,
- }),
- ]),
- }),
- }),
- {}
- );
- });
-
- it('zooming on minimap updates location query and updates echart start and end values', async function () {
- initialData.organization.features.push('widget-viewer-modal-minimap');
- mockEvents();
- await renderModal({
- initialData,
- widget: {
- ...mockWidget,
- queries: [{...mockQuery, name: ''}, additionalMockQuery],
- },
- });
- const calls = (ReactEchartsCore as jest.Mock).mock.calls;
- act(() => {
- // Simulate dataZoom event on chart
- calls[calls.length - 1][0].onEvents.datazoom(
- {seriesStart: 1646100000000, seriesEnd: 1646120000000},
- {
- getModel: () => {
- return {
- _payload: {start: 30, end: 70},
- };
- },
- }
- );
- });
-
- await waitFor(() =>
- expect(initialData.router.push).toHaveBeenCalledWith(
- expect.objectContaining({
- query: {
- viewerEnd: '2022-03-01T05:53:20',
- viewerStart: '2022-03-01T03:40:00',
- },
- })
- )
- );
- });
-
it('includes group by in widget viewer table', async function () {
mockEvents();
mockWidget.queries = [
@@ -566,7 +502,7 @@ describe('Modals -> WidgetViewerModal', function () {
const calls = (ReactEchartsCore as jest.Mock).mock.calls;
const yAxisFormatter =
calls[calls.length - 1][0].option.yAxis.axisLabel.formatter;
- expect(yAxisFormatter(123)).toEqual('123ms');
+ expect(yAxisFormatter(123)).toBe('123ms');
});
it('renders widget chart with default number y axis formatter when seriesResultType has multiple different types', async function () {
@@ -580,7 +516,7 @@ describe('Modals -> WidgetViewerModal', function () {
const calls = (ReactEchartsCore as jest.Mock).mock.calls;
const yAxisFormatter =
calls[calls.length - 1][0].option.yAxis.axisLabel.formatter;
- expect(yAxisFormatter(123)).toEqual('123');
+ expect(yAxisFormatter(123)).toBe('123');
});
it('does not allow sorting by transaction name when widget is using metrics', async function () {
@@ -675,7 +611,8 @@ describe('Modals -> WidgetViewerModal', function () {
});
describe('TopN Chart Widget', function () {
- let mockQuery, mockWidget;
+ let mockQuery!: Widget['queries'][number];
+ let mockWidget!: Widget;
function mockEventsStats() {
return MockApiClient.addMockResponse({
@@ -746,7 +683,6 @@ describe('Modals -> WidgetViewerModal', function () {
fields: ['error.type', 'count()'],
aggregates: ['count()'],
columns: ['error.type'],
- id: '1',
name: 'Query Name',
orderby: '',
};
@@ -911,77 +847,6 @@ describe('Modals -> WidgetViewerModal', function () {
await waitForMetaToHaveBeenCalled();
expect(eventsStatsMock).toHaveBeenCalledTimes(1);
});
-
- it('renders widget chart minimap', async function () {
- mockEventsStats();
- mockEvents();
- initialData.organization.features.push('widget-viewer-modal-minimap');
- await renderModal({initialData, widget: mockWidget});
-
- expect(ReactEchartsCore).toHaveBeenLastCalledWith(
- expect.objectContaining({
- option: expect.objectContaining({
- dataZoom: expect.arrayContaining([
- expect.objectContaining({
- realtime: false,
- showDetail: false,
- end: 100,
- start: 0,
- }),
- ]),
- }),
- }),
- {}
- );
- });
-
- it('zooming on minimap updates location query and updates echart start and end values', async function () {
- mockEventsStats();
- mockEvents();
- initialData.organization.features.push('widget-viewer-modal-minimap');
- await renderModal({initialData, widget: mockWidget});
- const calls = (ReactEchartsCore as jest.Mock).mock.calls;
- act(() => {
- // Simulate dataZoom event on chart
- calls[calls.length - 1][0].onEvents.datazoom(
- {seriesStart: 1646100000000, seriesEnd: 1646120000000},
- {
- getModel: () => {
- return {
- _payload: {start: 30, end: 70},
- };
- },
- }
- );
- });
-
- expect(initialData.router.push).toHaveBeenCalledWith(
- expect.objectContaining({
- query: {
- viewerEnd: '2022-03-01T05:53:20',
- viewerStart: '2022-03-01T03:40:00',
- },
- })
- );
-
- await waitFor(() => {
- expect(ReactEchartsCore).toHaveBeenLastCalledWith(
- expect.objectContaining({
- option: expect.objectContaining({
- dataZoom: expect.arrayContaining([
- expect.objectContaining({
- realtime: false,
- showDetail: false,
- endValue: 1646114000000,
- startValue: 1646106000000,
- }),
- ]),
- }),
- }),
- {}
- );
- });
- });
});
describe('Table Widget', function () {
@@ -1137,7 +1002,7 @@ describe('Modals -> WidgetViewerModal', function () {
});
describe('Issue Table Widget', function () {
- let issuesMock;
+ let issuesMock!: jest.Mock;
const mockQuery = {
conditions: 'is:unresolved',
fields: ['events', 'status', 'title'],
@@ -1353,7 +1218,7 @@ describe('Modals -> WidgetViewerModal', function () {
});
describe('Release Health Widgets', function () {
- let metricsMock;
+ let metricsMock!: jest.Mock;
const mockQuery = {
conditions: '',
fields: [`sum(session)`],
diff --git a/static/app/components/modals/widgetViewerModal.tsx b/static/app/components/modals/widgetViewerModal.tsx
index f343822c3a8797..1eab9c09e70e7f 100644
--- a/static/app/components/modals/widgetViewerModal.tsx
+++ b/static/app/components/modals/widgetViewerModal.tsx
@@ -1,10 +1,9 @@
-import {Fragment, memo, useEffect, useMemo, useRef, useState} from 'react';
+import {Fragment, memo, useEffect, useMemo, useState} from 'react';
import {components} from 'react-select';
import {css} from '@emotion/react';
import styled from '@emotion/styled';
import * as Sentry from '@sentry/react';
import {truncate} from '@sentry/utils';
-import type {DataZoomComponentOption} from 'echarts';
import type {Location} from 'history';
import cloneDeep from 'lodash/cloneDeep';
import isEqual from 'lodash/isEqual';
@@ -84,8 +83,7 @@ import {
SESSION_DURATION_ALERT,
WidgetDescription,
} from 'sentry/views/dashboards/widgetCard';
-import type {AugmentedEChartDataZoomHandler} from 'sentry/views/dashboards/widgetCard/chart';
-import WidgetCardChart, {SLIDER_HEIGHT} from 'sentry/views/dashboards/widgetCard/chart';
+import WidgetCardChart from 'sentry/views/dashboards/widgetCard/chart';
import {
DashboardsMEPProvider,
useDashboardsMEPContext,
@@ -140,19 +138,10 @@ const shouldWidgetCardChartMemo = (prevProps, props) => {
const sortMatches =
props.location.query[WidgetViewerQueryField.SORT] ===
prevProps.location.query[WidgetViewerQueryField.SORT];
- const chartZoomOptionsMatches = isEqual(
- props.chartZoomOptions,
- prevProps.chartZoomOptions
- );
const isNotTopNWidget =
props.widget.displayType !== DisplayType.TOP_N && !defined(props.widget.limit);
const legendMatches = isEqual(props.legendOptions, prevProps.legendOptions);
- return (
- selectionMatches &&
- chartZoomOptionsMatches &&
- (sortMatches || isNotTopNWidget) &&
- legendMatches
- );
+ return selectionMatches && (sortMatches || isNotTopNWidget) && legendMatches;
};
// WidgetCardChartContainer and WidgetCardChart rerenders if selection was changed.
@@ -210,7 +199,6 @@ function WidgetViewerModal(props: Props) {
const location = useLocation();
const {projects} = useProjects();
const navigate = useNavigate();
- const shouldShowSlider = organization.features.includes('widget-viewer-modal-minimap');
// TODO(Tele-Team): Re-enable this when we have a better way to determine if the data is transaction only
// let widgetContentLoadingStatus: boolean | undefined = undefined;
// Get widget zoom from location
@@ -234,33 +222,15 @@ function WidgetViewerModal(props: Props) {
const [chartUnmodified, setChartUnmodified] = useState(true);
- const [chartZoomOptions, setChartZoomOptions] = useState({
- start: 0,
- end: 100,
- });
-
- // We wrap the modalChartSelection in a useRef because we do not want to recalculate this value
- // (which would cause an unnecessary rerender on calculation) except for the initial load.
- // We use this for when a user visit a widget viewer url directly.
- const [modalTableSelection, setModalTableSelection] =
- useState(locationPageFilter);
- const modalChartSelection = useRef(modalTableSelection);
+ const [modalSelection, setModalSelection] = useState(locationPageFilter);
// Detect when a user clicks back and set the PageFilter state to match the location
- // We need to use useEffect to prevent infinite looping rerenders due to the setModalTableSelection call
+ // We need to use useEffect to prevent infinite looping rerenders due to the setModalSelection call
useEffect(() => {
if (location.action === 'POP') {
- setModalTableSelection(locationPageFilter);
- if (start && end) {
- setChartZoomOptions({
- startValue: moment.utc(start).unix() * 1000,
- endValue: moment.utc(end).unix() * 1000,
- });
- } else {
- setChartZoomOptions({start: 0, end: 100});
- }
+ setModalSelection(locationPageFilter);
}
- }, [end, location, locationPageFilter, start]);
+ }, [location, locationPageFilter]);
const [totalResults, setTotalResults] = useState();
@@ -297,16 +267,16 @@ function WidgetViewerModal(props: Props) {
// Create Table widget
const tableWidget = {
- ...cloneDeep({...widget, queries: [sortedQueries[selectedQueryIndex]]}),
+ ...cloneDeep({...widget, queries: [sortedQueries[selectedQueryIndex]!]}),
displayType: DisplayType.TABLE,
};
- const {aggregates, columns} = tableWidget.queries[0];
- const {orderby} = widget.queries[0];
+ const {aggregates, columns} = tableWidget.queries[0]!;
+ const {orderby} = widget.queries[0]!;
const order = orderby.startsWith('-');
const rawOrderby = trimStart(orderby, '-');
- const fields = defined(tableWidget.queries[0].fields)
- ? tableWidget.queries[0].fields
+ const fields = defined(tableWidget.queries[0]!.fields)
+ ? tableWidget.queries[0]!.fields
: [...columns, ...aggregates];
// Some Discover Widgets (Line, Area, Bar) allow the user to specify an orderby
@@ -339,8 +309,8 @@ function WidgetViewerModal(props: Props) {
// Need to set the orderby of the eventsv2 query to equation[index] format
// since eventsv2 does not accept the raw equation as a valid sort payload
- if (isEquation(rawOrderby) && tableWidget.queries[0].orderby === orderby) {
- tableWidget.queries[0].orderby = `${order ? '-' : ''}equation[${
+ if (isEquation(rawOrderby) && tableWidget.queries[0]!.orderby === orderby) {
+ tableWidget.queries[0]!.orderby = `${order ? '-' : ''}equation[${
getNumEquations(fields) - 1
}]`;
}
@@ -381,8 +351,8 @@ function WidgetViewerModal(props: Props) {
switch (widget.widgetType) {
case WidgetType.DISCOVER:
if (fields.length === 1) {
- tableWidget.queries[0].orderby =
- tableWidget.queries[0].orderby || `-${fields[0]}`;
+ tableWidget.queries[0]!.orderby =
+ tableWidget.queries[0]!.orderby || `-${fields[0]}`;
}
fields.unshift('title');
columns.unshift('title');
@@ -398,8 +368,8 @@ function WidgetViewerModal(props: Props) {
const eventView = eventViewFromWidget(
tableWidget.title,
- tableWidget.queries[0],
- modalTableSelection
+ tableWidget.queries[0]!,
+ modalSelection
);
let columnOrder = decodeColumnOrder(
@@ -410,7 +380,7 @@ function WidgetViewerModal(props: Props) {
const columnSortBy = eventView.getSorts();
columnOrder = columnOrder.map((column, index) => ({
...column,
- width: parseInt(widths[index], 10) || -1,
+ width: parseInt(widths[index]!, 10) || -1,
}));
const getOnDemandFilterWarning = createOnDemandFilterWarning(
@@ -685,7 +655,7 @@ function WidgetViewerModal(props: Props) {
onResizeColumn,
}}
/>
- {!tableWidget.queries[0].orderby.match(/^-?release$/) &&
+ {!tableWidget.queries[0]!.orderby.match(/^-?release$/) &&
(links?.previous?.results || links?.next?.results) && (
{
- // @ts-expect-error getModel() is private but we need this to retrieve datetime values of zoomed in region
+ const onZoom = (_evt, chart) => {
const model = chart.getModel();
- const {seriesStart, seriesEnd} = evt;
- let startValue, endValue;
- startValue = model._payload.batch?.[0].startValue;
- endValue = model._payload.batch?.[0].endValue;
- const seriesStartTime = seriesStart ? new Date(seriesStart).getTime() : undefined;
- const seriesEndTime = seriesEnd ? new Date(seriesEnd).getTime() : undefined;
- // Slider zoom events don't contain the raw date time value, only the percentage
- // We use the percentage with the start and end of the series to calculate the adjusted zoom
- if (startValue === undefined || endValue === undefined) {
- if (seriesStartTime && seriesEndTime) {
- const diff = seriesEndTime - seriesStartTime;
- startValue = diff * model._payload.start * 0.01 + seriesStartTime;
- endValue = diff * model._payload.end * 0.01 + seriesStartTime;
- } else {
- return;
- }
- }
- setChartZoomOptions({startValue, endValue});
+ const {startValue, endValue} = model._payload.batch[0];
const newStart = getUtcDateString(moment.utc(startValue));
const newEnd = getUtcDateString(moment.utc(endValue));
- setModalTableSelection({
- ...modalTableSelection,
+ setModalSelection({
+ ...modalSelection,
datetime: {
- ...modalTableSelection.datetime,
+ ...modalSelection.datetime,
start: newStart,
end: newEnd,
period: null,
@@ -776,7 +728,7 @@ function WidgetViewerModal(props: Props) {
api={api}
organization={organization}
widget={tableWidget}
- selection={modalTableSelection}
+ selection={modalSelection}
limit={
widget.displayType === DisplayType.TABLE
? FULL_TABLE_ITEM_LIMIT
@@ -801,7 +753,7 @@ function WidgetViewerModal(props: Props) {
api={api}
organization={organization}
widget={tableWidget}
- selection={modalTableSelection}
+ selection={modalSelection}
limit={
widget.displayType === DisplayType.TABLE
? FULL_TABLE_ITEM_LIMIT
@@ -829,7 +781,7 @@ function WidgetViewerModal(props: Props) {
api={api}
organization={organization}
widget={tableWidget}
- selection={modalTableSelection}
+ selection={modalSelection}
limit={
widget.displayType === DisplayType.TABLE
? FULL_TABLE_ITEM_LIMIT
@@ -875,16 +827,7 @@ function WidgetViewerModal(props: Props) {
@@ -899,15 +842,16 @@ function WidgetViewerModal(props: Props) {
widget={widget}
selection={selection}
organization={organization}
- onZoom={onZoom}
+ onZoom={(_evt, chart) => {
+ onZoom(_evt, chart);
+ setChartUnmodified(false);
+ }}
onLegendSelectChanged={onLegendSelectChanged}
legendOptions={{
selected: widgetLegendState.getWidgetSelectionState(widget),
}}
expandNumbers
- showSlider={shouldShowSlider}
noPadding
- chartZoomOptions={chartZoomOptions}
widgetLegendState={widgetLegendState}
/>
) : (
@@ -915,7 +859,7 @@ function WidgetViewerModal(props: Props) {
location={location}
api={api}
organization={organization}
- selection={modalChartSelection.current}
+ selection={modalSelection}
dashboardFilters={dashboardFilters}
// Top N charts rely on the orderby of the table
widget={primaryWidget}
@@ -925,9 +869,7 @@ function WidgetViewerModal(props: Props) {
selected: widgetLegendState.getWidgetSelectionState(widget),
}}
expandNumbers
- showSlider={shouldShowSlider}
noPadding
- chartZoomOptions={chartZoomOptions}
widgetLegendState={widgetLegendState}
/>
)}
@@ -940,7 +882,7 @@ function WidgetViewerModal(props: Props) {
)}
)}
- {(widget.queries.length > 1 || widget.queries[0].conditions) && (
+ {(widget.queries.length > 1 || widget.queries[0]!.conditions) && (
- {queryOptions[selectedQueryIndex].getHighlightedQuery({
+ {queryOptions[selectedQueryIndex]!.getHighlightedQuery({
display: 'block',
}) ??
- (queryOptions[selectedQueryIndex].label || (
+ (queryOptions[selectedQueryIndex]!.label || (
{EMPTY_QUERY_NAME}
))}
@@ -1096,7 +1038,7 @@ function WidgetViewerModal(props: Props) {
{column.name}}
- direction={widget.queries[0].orderby === sortField ? 'desc' : undefined}
+ direction={widget.queries[0]!.orderby === sortField ? 'desc' : undefined}
canSort={!!sortField}
generateSortLink={() => ({
...location,
@@ -118,14 +118,14 @@ export const renderDiscoverGridHeaderCell = ({
column: TableColumn,
_columnIndex: number
): React.ReactNode {
- const {orderby} = widget.queries[0];
+ const {orderby} = widget.queries[0]!;
// Need to convert orderby to aggregate alias because eventView still uses aggregate alias format
const aggregateAliasOrderBy = `${
orderby.startsWith('-') ? '-' : ''
}${getAggregateAlias(trimStart(orderby, '-'))}`;
const eventView = eventViewFromWidget(
widget.title,
- {...widget.queries[0], orderby: aggregateAliasOrderBy},
+ {...widget.queries[0]!, orderby: aggregateAliasOrderBy},
selection
);
const tableMeta = tableData?.meta;
@@ -318,7 +318,7 @@ export const renderReleaseGridHeaderCell = ({
): React.ReactNode {
const tableMeta = tableData?.meta;
const align = fieldAlignment(column.name, column.type, tableMeta);
- const widgetOrderBy = widget.queries[0].orderby;
+ const widgetOrderBy = widget.queries[0]!.orderby;
const sort: Sort = {
kind: widgetOrderBy.startsWith('-') ? 'desc' : 'asc',
field: widgetOrderBy.startsWith('-') ? widgetOrderBy.slice(1) : widgetOrderBy,
diff --git a/static/app/components/nav/index.spec.tsx b/static/app/components/nav/index.spec.tsx
index 2572f9e5d50cbb..3ad846511a10e5 100644
--- a/static/app/components/nav/index.spec.tsx
+++ b/static/app/components/nav/index.spec.tsx
@@ -8,7 +8,7 @@ jest.mock('sentry/utils/analytics', () => ({
trackAnalytics: jest.fn(),
}));
-import {getAllByRole, render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
+import {render, screen, userEvent, within} from 'sentry-test/reactTestingLibrary';
import Nav from 'sentry/components/nav';
@@ -30,30 +30,33 @@ const ALL_AVAILABLE_FEATURES = [
describe('Nav', function () {
describe('default', function () {
- beforeEach(() => {
+ function renderNav() {
render( , {
router: RouterFixture({
location: LocationFixture({pathname: '/organizations/org-slug/issues/'}),
}),
organization: OrganizationFixture({features: ALL_AVAILABLE_FEATURES}),
});
- });
+ }
+
it('renders primary navigation', async function () {
+ renderNav();
expect(
await screen.findByRole('navigation', {name: 'Primary Navigation'})
).toBeInTheDocument();
});
it('renders secondary navigation', async function () {
+ renderNav();
expect(
await screen.findByRole('navigation', {name: 'Secondary Navigation'})
).toBeInTheDocument();
});
it('renders expected primary nav items', function () {
- const links = getAllByRole(
- screen.getByRole('navigation', {name: 'Primary Navigation'}),
- 'link'
- );
+ renderNav();
+ const links = within(
+ screen.getByRole('navigation', {name: 'Primary Navigation'})
+ ).getAllByRole('link');
expect(links).toHaveLength(8);
[
@@ -72,7 +75,7 @@ describe('Nav', function () {
});
describe('issues', function () {
- beforeEach(() => {
+ function renderNav() {
render( , {
router: RouterFixture({
location: LocationFixture({
@@ -82,17 +85,19 @@ describe('Nav', function () {
}),
organization: OrganizationFixture({features: ALL_AVAILABLE_FEATURES}),
});
- });
+ }
it('renders secondary navigation', async function () {
+ renderNav();
expect(
await screen.findByRole('navigation', {name: 'Secondary Navigation'})
).toBeInTheDocument();
});
it('includes expected submenu items', function () {
+ renderNav();
const container = screen.getByRole('navigation', {name: 'Secondary Navigation'});
- const links = getAllByRole(container, 'link');
+ const links = within(container).getAllByRole('link');
expect(links).toHaveLength(6);
['All', 'Error & Outage', 'Trend', 'Craftsmanship', 'Security', 'Feedback'].forEach(
@@ -104,7 +109,7 @@ describe('Nav', function () {
});
describe('insights', function () {
- beforeEach(() => {
+ function renderNav() {
render( , {
router: RouterFixture({
location: LocationFixture({
@@ -113,17 +118,19 @@ describe('Nav', function () {
}),
organization: OrganizationFixture({features: ALL_AVAILABLE_FEATURES}),
});
- });
+ }
it('renders secondary navigation', async function () {
+ renderNav();
expect(
await screen.findByRole('navigation', {name: 'Secondary Navigation'})
).toBeInTheDocument();
});
it('includes expected submenu items', function () {
+ renderNav();
const container = screen.getByRole('navigation', {name: 'Secondary Navigation'});
- const links = getAllByRole(container, 'link');
+ const links = within(container).getAllByRole('link');
expect(links).toHaveLength(4);
['Frontend', 'Backend', 'Mobile', 'AI'].forEach((title, index) => {
expect(links[index]).toHaveAccessibleName(title);
@@ -132,24 +139,26 @@ describe('Nav', function () {
});
describe('explore', function () {
- beforeEach(() => {
+ function renderNav() {
render( , {
router: RouterFixture({
location: LocationFixture({pathname: '/organizations/org-slug/traces/'}),
}),
organization: OrganizationFixture({features: ALL_AVAILABLE_FEATURES}),
});
- });
+ }
it('renders secondary navigation', async function () {
+ renderNav();
expect(
await screen.findByRole('navigation', {name: 'Secondary Navigation'})
).toBeInTheDocument();
});
it('includes expected submenu items', function () {
+ renderNav();
const container = screen.getByRole('navigation', {name: 'Secondary Navigation'});
- const links = getAllByRole(container, 'link');
+ const links = within(container).getAllByRole('link');
expect(links).toHaveLength(7);
[
'Traces',
@@ -166,16 +175,17 @@ describe('Nav', function () {
});
describe('analytics', function () {
- beforeEach(() => {
+ function renderNav() {
render( , {
router: RouterFixture({
location: LocationFixture({pathname: '/organizations/org-slug/traces/'}),
}),
organization: OrganizationFixture({features: ALL_AVAILABLE_FEATURES}),
});
- });
+ }
it('tracks primary sidebar item', async function () {
+ renderNav();
const issues = screen.getByRole('link', {name: 'Issues'});
await userEvent.click(issues);
expect(trackAnalytics).toHaveBeenCalledWith(
diff --git a/static/app/components/nav/utils.tsx b/static/app/components/nav/utils.tsx
index 498162e743828d..5b509e01e8bb90 100644
--- a/static/app/components/nav/utils.tsx
+++ b/static/app/components/nav/utils.tsx
@@ -165,7 +165,7 @@ export function resolveNavItemTo(
return undefined;
}
if (isSidebarItem(item) && isNonEmptyArray(item.submenu)) {
- return item.submenu[0].to;
+ return item.submenu[0]!.to;
}
return undefined;
}
diff --git a/static/app/components/notificationActions/forms/onCallServiceForm.tsx b/static/app/components/notificationActions/forms/onCallServiceForm.tsx
index 21aa0696c994ce..5dc690069abc34 100644
--- a/static/app/components/notificationActions/forms/onCallServiceForm.tsx
+++ b/static/app/components/notificationActions/forms/onCallServiceForm.tsx
@@ -41,7 +41,7 @@ function OnCallServiceForm({
}: OnCallServiceFormProps) {
const [selectedAccount, setSelectedAccount] = useState(
action.integrationId
- ? Integrations[action.integrationId][0].action.integrationName
+ ? Integrations[action.integrationId]![0]!.action.integrationName
: ''
);
const [selectedDisplay, setSelectedDisplay] = useState(action.targetDisplay ?? '');
@@ -67,7 +67,7 @@ function OnCallServiceForm({
if (!action.integrationId) {
return [];
}
- const services = Integrations[action.integrationId];
+ const services = Integrations[action.integrationId]!;
return services.map(service => ({
key: service.action.targetDisplay ?? '',
label: service.action.targetDisplay,
diff --git a/static/app/components/notificationActions/notificationActionManager.spec.tsx b/static/app/components/notificationActions/notificationActionManager.spec.tsx
index 0e91aaea1b81b7..1f09cda2e977cb 100644
--- a/static/app/components/notificationActions/notificationActionManager.spec.tsx
+++ b/static/app/components/notificationActions/notificationActionManager.spec.tsx
@@ -86,7 +86,7 @@ describe('Adds, deletes, and updates notification actions', function () {
/>
);
const projectNotificationActions = screen.queryAllByTestId('notification-action');
- expect(projectNotificationActions.length).toBe(4);
+ expect(projectNotificationActions).toHaveLength(4);
});
it('disables buttons and dropdowns when disabled is True', function () {
@@ -97,7 +97,7 @@ describe('Adds, deletes, and updates notification actions', function () {
render(
{
- expect(screen.queryByTestId('sentry_notification-action')).toBeInTheDocument();
+ expect(screen.getByTestId('sentry_notification-action')).toBeInTheDocument();
});
});
it('Removes a Sentry notification action', async function () {
const mockDELETE = MockApiClient.addMockResponse({
- url: `/organizations/${organization.slug}/notifications/actions/${notificationActions[0].id}/`,
+ url: `/organizations/${organization.slug}/notifications/actions/${notificationActions[0]!.id}/`,
method: 'DELETE',
body: [],
});
render(
{
- expect(screen.queryByTestId('slack-action')).toBeInTheDocument();
+ expect(screen.getByTestId('slack-action')).toBeInTheDocument();
});
});
it('Removes a Slack action', async function () {
const mockDELETE = MockApiClient.addMockResponse({
- url: `/organizations/${organization.slug}/notifications/actions/${notificationActions[1].id}/`,
+ url: `/organizations/${organization.slug}/notifications/actions/${notificationActions[1]!.id}/`,
method: 'DELETE',
body: [],
});
render(
{
- expect(screen.queryByTestId('pagerduty-action')).toBeInTheDocument();
+ expect(screen.getByTestId('pagerduty-action')).toBeInTheDocument();
});
});
it('Edits a Pagerduty action', async function () {
const mockPUT = MockApiClient.addMockResponse({
- url: `/organizations/${organization.slug}/notifications/actions/${notificationActions[2].id}/`,
+ url: `/organizations/${organization.slug}/notifications/actions/${notificationActions[2]!.id}/`,
method: 'PUT',
body: [
{
@@ -393,7 +393,7 @@ describe('Adds, deletes, and updates notification actions', function () {
render(
{
- expect(screen.queryByTestId('opsgenie-action')).toBeInTheDocument();
+ expect(screen.getByTestId('opsgenie-action')).toBeInTheDocument();
});
});
it('Edits an Opsgenie Action', async function () {
const mockPUT = MockApiClient.addMockResponse({
- url: `/organizations/${organization.slug}/notifications/actions/${notificationActions[3].id}/`,
+ url: `/organizations/${organization.slug}/notifications/actions/${notificationActions[3]!.id}/`,
method: 'PUT',
body: [
{
@@ -498,7 +498,7 @@ describe('Adds, deletes, and updates notification actions', function () {
render(
{
// Add notification action
- const updatedActions = [...notificationActions, validActions[0].action];
+ const updatedActions = [...notificationActions, validActions[0]!.action];
setNotificationActions(updatedActions);
},
});
diff --git a/static/app/components/onboarding/documentationWrapper.tsx b/static/app/components/onboarding/documentationWrapper.tsx
deleted file mode 100644
index 978acc8b22e903..00000000000000
--- a/static/app/components/onboarding/documentationWrapper.tsx
+++ /dev/null
@@ -1,88 +0,0 @@
-import styled from '@emotion/styled';
-
-import type {AlertProps} from 'sentry/components/alert';
-import {alertStyles} from 'sentry/components/alert';
-import {space} from 'sentry/styles/space';
-
-type AlertType = AlertProps['type'];
-
-const getAlertSelector = (type: AlertType) =>
- type === 'muted' ? null : `.alert[level="${type}"], .alert-${type}`;
-
-export const DocumentationWrapper = styled('div')`
- /* Size of the new footer + 16px */
- padding-bottom: calc(72px + ${space(2)});
-
- h2 {
- font-size: 1.375rem;
- }
-
- h3 {
- font-size: 1.25rem;
- }
-
- h1,
- h2,
- h3,
- h4,
- h5,
- h6,
- p,
- ul,
- ol,
- li {
- margin-top: 0.5em;
- margin-bottom: 0.5em;
- }
-
- blockquote,
- hr,
- pre,
- pre[class*='language-'],
- div[data-language] {
- margin-top: 1em;
- margin-bottom: 1em;
- }
-
- blockquote {
- padding: ${space(1.5)} ${space(2)};
- ${p => alertStyles({theme: p.theme, type: 'info'})}
- }
-
- blockquote > * {
- margin: 0;
- }
-
- .gatsby-highlight:last-child {
- margin-bottom: 0;
- }
-
- hr {
- border-color: ${p => p.theme.border};
- }
-
- code {
- color: ${p => p.theme.pink400};
- }
-
- .alert {
- border-radius: ${p => p.theme.borderRadius};
- }
-
- /**
- * XXX(epurkhiser): This comes from the doc styles and avoids bottom margin issues in alerts
- */
- .content-flush-bottom *:last-child {
- margin-bottom: 0;
- }
-
- ${p =>
- Object.keys(p.theme.alert).map(
- type => `
- ${getAlertSelector(type as AlertType)} {
- ${alertStyles({theme: p.theme, type: type as AlertType})};
- display: block;
- }
- `
- )}
-`;
diff --git a/static/app/components/onboarding/frameworkSuggestionModal.tsx b/static/app/components/onboarding/frameworkSuggestionModal.tsx
index a35999e131881e..187ff72a4cfc13 100644
--- a/static/app/components/onboarding/frameworkSuggestionModal.tsx
+++ b/static/app/components/onboarding/frameworkSuggestionModal.tsx
@@ -313,7 +313,7 @@ function TopFrameworksImage({frameworks}: {frameworks: PlatformIntegration[]}) {
SENTRY_AUTH_TOKEN= ___ORG_AUTH_TOKEN___';
const tokenNodes = replaceTokensWithSpan(element);
- expect(element.innerHTML).toEqual(
+ expect(element.innerHTML).toBe(
'SENTRY_AUTH_TOKEN = '
);
expect(tokenNodes).toHaveLength(1);
- expect(element.contains(tokenNodes[0])).toBe(true);
+ expect(element.contains(tokenNodes[0]!)).toBe(true);
});
it('replaces multiple ___ORG_AUTH_TOKEN___ tokens', function () {
@@ -22,14 +22,14 @@ const assetUrl = '___ORG_AUTH_TOKEN___';
`;
const tokenNodes = replaceTokensWithSpan(element);
- expect(element.innerHTML).toEqual(
+ expect(element.innerHTML).toBe(
`
const cdn = ' ';
const assetUrl = ' ';
`
);
expect(tokenNodes).toHaveLength(2);
- expect(element.contains(tokenNodes[0])).toBe(true);
- expect(element.contains(tokenNodes[1])).toBe(true);
+ expect(element.contains(tokenNodes[0]!)).toBe(true);
+ expect(element.contains(tokenNodes[1]!)).toBe(true);
});
});
diff --git a/static/app/components/onboarding/gettingStartedDoc/step.tsx b/static/app/components/onboarding/gettingStartedDoc/step.tsx
index 862d864e8afdad..2233c1a4e45490 100644
--- a/static/app/components/onboarding/gettingStartedDoc/step.tsx
+++ b/static/app/components/onboarding/gettingStartedDoc/step.tsx
@@ -53,8 +53,8 @@ export function TabbedCodeSnippet({
onSelectAndCopy,
partialLoading,
}: TabbedCodeSnippetProps) {
- const [selectedTabValue, setSelectedTabValue] = useState(tabs[0].value);
- const selectedTab = tabs.find(tab => tab.value === selectedTabValue) ?? tabs[0];
+ const [selectedTabValue, setSelectedTabValue] = useState(tabs[0]!.value);
+ const selectedTab = tabs.find(tab => tab.value === selectedTabValue) ?? tabs[0]!;
const {code, language, filename} = selectedTab;
return (
diff --git a/static/app/components/onboarding/gettingStartedDoc/types.ts b/static/app/components/onboarding/gettingStartedDoc/types.ts
index 54e05444dd473f..852b6c0f21f222 100644
--- a/static/app/components/onboarding/gettingStartedDoc/types.ts
+++ b/static/app/components/onboarding/gettingStartedDoc/types.ts
@@ -111,6 +111,7 @@ export interface Docs;
featureFlagOnboarding?: OnboardingConfig;
feedbackOnboardingCrashApi?: OnboardingConfig;
+ feedbackOnboardingJsLoader?: OnboardingConfig;
feedbackOnboardingNpm?: OnboardingConfig;
performanceOnboarding?: OnboardingConfig;
platformOptions?: PlatformOptions;
@@ -123,6 +124,7 @@ export type ConfigType =
| 'onboarding'
| 'feedbackOnboardingNpm'
| 'feedbackOnboardingCrashApi'
+ | 'feedbackOnboardingJsLoader'
| 'crashReportOnboarding'
| 'replayOnboarding'
| 'replayOnboardingJsLoader'
diff --git a/static/app/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding.tsx b/static/app/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding.tsx
index 72e15c3a8dba34..abc989a265a5d5 100644
--- a/static/app/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding.tsx
+++ b/static/app/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding.tsx
@@ -155,13 +155,13 @@ export function getCrashReportSDKInstallFirstStep(params: DocsParams) {
params.sourcePackageRegistries && !params.sourcePackageRegistries.isLoading;
const version =
(dataLoaded &&
- params.sourcePackageRegistries.data?.['sentry.javascript.browser'].version) ??
+ params.sourcePackageRegistries.data?.['sentry.javascript.browser']!.version) ??
'';
const hash =
(dataLoaded &&
- params.sourcePackageRegistries.data?.['sentry.javascript.browser'].files[
+ params.sourcePackageRegistries.data?.['sentry.javascript.browser']!.files[
'bundle.min.js'
- ].checksums['sha384-base64']) ??
+ ]!.checksums['sha384-base64']) ??
'';
return {
@@ -242,13 +242,13 @@ export function getCrashReportSDKInstallFirstStepRails(params: DocsParams) {
params.sourcePackageRegistries && !params.sourcePackageRegistries.isLoading;
const version =
(dataLoaded &&
- params.sourcePackageRegistries.data?.['sentry.javascript.browser'].version) ??
+ params.sourcePackageRegistries.data?.['sentry.javascript.browser']!.version) ??
'';
const hash =
(dataLoaded &&
- params.sourcePackageRegistries.data?.['sentry.javascript.browser'].files[
+ params.sourcePackageRegistries.data?.['sentry.javascript.browser']!.files[
'bundle.min.js'
- ].checksums['sha384-base64']) ??
+ ]!.checksums['sha384-base64']) ??
'';
return {
diff --git a/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx b/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx
index d0dac3f24ad4a5..540ce2b51bac9f 100644
--- a/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx
+++ b/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx
@@ -80,7 +80,7 @@ describe('useCurrentProjectState', () => {
},
wrapper: createWrapper(),
});
- expect(result.current.currentProject).toBe(undefined);
+ expect(result.current.currentProject).toBeUndefined();
});
it('should return currentProject=undefined when project url param is present and currentPanel != targetPanel', () => {
@@ -95,7 +95,7 @@ describe('useCurrentProjectState', () => {
},
wrapper: createWrapper(angular.id),
});
- expect(result.current.currentProject).toBe(undefined);
+ expect(result.current.currentProject).toBeUndefined();
});
it('should return the currentProject when currentPanel = targetPanel', () => {
@@ -185,7 +185,7 @@ describe('useCurrentProjectState', () => {
},
wrapper: createWrapper(),
});
- expect(result.current.currentProject).toBe(undefined);
+ expect(result.current.currentProject).toBeUndefined();
});
it('should override current project if setCurrentProjects is called', () => {
diff --git a/static/app/components/onboarding/platformOptionsControl.tsx b/static/app/components/onboarding/platformOptionsControl.tsx
index 7d7588781a6fde..1253ec46ad5822 100644
--- a/static/app/components/onboarding/platformOptionsControl.tsx
+++ b/static/app/components/onboarding/platformOptionsControl.tsx
@@ -26,8 +26,8 @@ export function useUrlPlatformOptions {
- const defaultValue = platformOptions[key].defaultValue;
- const values = platformOptions[key].items.map(({value}) => value);
+ const defaultValue = platformOptions[key]!.defaultValue;
+ const values = platformOptions[key]!.items.map(({value}) => value);
acc[key as keyof PlatformOptions] = values.includes(query[key])
? query[key]
: defaultValue ?? values[0];
@@ -100,7 +100,7 @@ export function PlatformOptionsControl({
handleChange(key, value)}
/>
))}
diff --git a/static/app/components/onboardingWizard/deprecatedNewSidebar.tsx b/static/app/components/onboardingWizard/deprecatedNewSidebar.tsx
new file mode 100644
index 00000000000000..d4833f619a4aa0
--- /dev/null
+++ b/static/app/components/onboardingWizard/deprecatedNewSidebar.tsx
@@ -0,0 +1,525 @@
+import {Fragment, useCallback, useEffect, useMemo, useState} from 'react';
+import {css} from '@emotion/react';
+import styled from '@emotion/styled';
+import {motion} from 'framer-motion';
+import partition from 'lodash/partition';
+
+import HighlightTopRight from 'sentry-images/pattern/highlight-top-right.svg';
+
+import {navigateTo} from 'sentry/actionCreators/navigation';
+import {updateOnboardingTask} from 'sentry/actionCreators/onboardingTasks';
+import {Button} from 'sentry/components/button';
+import {Chevron} from 'sentry/components/chevron';
+import InteractionStateLayer from 'sentry/components/interactionStateLayer';
+import SkipConfirm from 'sentry/components/onboardingWizard/skipConfirm';
+import type {useOnboardingTasks} from 'sentry/components/onboardingWizard/useOnboardingTasks';
+import {taskIsDone} from 'sentry/components/onboardingWizard/utils';
+import ProgressRing from 'sentry/components/progressRing';
+import SidebarPanel from 'sentry/components/sidebar/sidebarPanel';
+import type {CommonSidebarProps} from 'sentry/components/sidebar/types';
+import {Tooltip} from 'sentry/components/tooltip';
+import {IconCheckmark, IconClose, IconNot, IconSync} from 'sentry/icons';
+import {t, tct} from 'sentry/locale';
+import DemoWalkthroughStore from 'sentry/stores/demoWalkthroughStore';
+import {space} from 'sentry/styles/space';
+import {
+ type OnboardingTask,
+ OnboardingTaskKey,
+ type OnboardingTaskStatus,
+} from 'sentry/types/onboarding';
+import {trackAnalytics} from 'sentry/utils/analytics';
+import {isDemoModeEnabled} from 'sentry/utils/demoMode';
+import useApi from 'sentry/utils/useApi';
+import {useLocalStorageState} from 'sentry/utils/useLocalStorageState';
+import useOrganization from 'sentry/utils/useOrganization';
+import useRouter from 'sentry/utils/useRouter';
+
+const orderedGettingStartedTasks = [
+ OnboardingTaskKey.FIRST_PROJECT,
+ OnboardingTaskKey.FIRST_EVENT,
+ OnboardingTaskKey.INVITE_MEMBER,
+ OnboardingTaskKey.ALERT_RULE,
+ OnboardingTaskKey.SOURCEMAPS,
+ OnboardingTaskKey.RELEASE_TRACKING,
+ OnboardingTaskKey.LINK_SENTRY_TO_SOURCE_CODE,
+];
+
+const orderedBeyondBasicsTasks = [
+ OnboardingTaskKey.REAL_TIME_NOTIFICATIONS,
+ OnboardingTaskKey.SESSION_REPLAY,
+ OnboardingTaskKey.FIRST_TRANSACTION,
+ OnboardingTaskKey.SECOND_PLATFORM,
+];
+
+function groupTasksByCompletion(tasks: OnboardingTask[]) {
+ const [completedTasks, incompletedTasks] = partition(tasks, task => taskIsDone(task));
+ return {
+ completedTasks,
+ incompletedTasks,
+ };
+}
+
+interface TaskProps extends Pick {
+ hidePanel: () => void;
+ task: OnboardingTask;
+ completed?: boolean;
+ showWaitingIndicator?: boolean;
+}
+
+function Task({task, status, hidePanel, showWaitingIndicator}: TaskProps) {
+ const api = useApi();
+ const organization = useOrganization();
+ const router = useRouter();
+
+ const handleClick = useCallback(
+ (e: React.MouseEvent) => {
+ trackAnalytics('quick_start.task_card_clicked', {
+ organization,
+ todo_id: task.task,
+ todo_title: task.title,
+ action: 'clickthrough',
+ new_experience: true,
+ });
+
+ e.stopPropagation();
+
+ if (isDemoModeEnabled()) {
+ DemoWalkthroughStore.activateGuideAnchor(task.task);
+ }
+
+ if (task.actionType === 'external') {
+ window.open(task.location, '_blank');
+ }
+
+ if (task.actionType === 'action') {
+ task.action(router);
+ }
+
+ if (task.actionType === 'app') {
+ // Convert all paths to a location object
+ let to =
+ typeof task.location === 'string' ? {pathname: task.location} : task.location;
+ // Add referrer to all links
+ to = {...to, query: {...to.query, referrer: 'onboarding_task'}};
+
+ navigateTo(to, router);
+ }
+ hidePanel();
+ },
+ [task, organization, router, hidePanel]
+ );
+
+ const handleMarkSkipped = useCallback(
+ (taskKey: OnboardingTaskKey) => {
+ trackAnalytics('quick_start.task_card_clicked', {
+ organization,
+ todo_id: task.task,
+ todo_title: task.title,
+ action: 'skipped',
+ new_experience: true,
+ });
+ updateOnboardingTask(api, organization, {
+ task: taskKey,
+ status: 'skipped',
+ completionSeen: true,
+ });
+ },
+ [task, organization, api]
+ );
+
+ if (status === 'complete') {
+ return (
+
+ {task.title}
+
+
+
+
+ );
+ }
+
+ if (status === 'skipped') {
+ return (
+
+ {task.title}
+
+
+
+
+ );
+ }
+
+ return (
+
+
+
+
{task.title}
+
{task.description}
+
+ {task.requisiteTasks.length === 0 && (
+
+ {task.skippable && (
+ handleMarkSkipped(task.task)}>
+ {({skip}) => (
+ }
+ onClick={skip}
+ css={css`
+ /* If the pulsing indicator is active, the close button
+ * should be above it so it's clickable.
+ */
+ z-index: 1;
+ `}
+ />
+ )}
+
+ )}
+ {task.SupplementComponent && showWaitingIndicator && (
+
+ )}
+ {status === 'pending' && (
+
+
+
+ )}
+
+ )}
+
+ );
+}
+
+interface TaskGroupProps {
+ description: string;
+ /**
+ * Used for analytics
+ */
+ group: 'getting_started' | 'beyond_basics';
+ hidePanel: () => void;
+ taskKeyForWaitingIndicator: OnboardingTaskKey | undefined;
+ tasks: OnboardingTask[];
+ title: string;
+ expanded?: boolean;
+ toggleable?: boolean;
+}
+
+function TaskGroup({
+ title,
+ description,
+ tasks,
+ expanded,
+ hidePanel,
+ taskKeyForWaitingIndicator,
+ toggleable = true,
+ group,
+}: TaskGroupProps) {
+ const organization = useOrganization();
+ const [isExpanded, setIsExpanded] = useState(expanded);
+ const {completedTasks, incompletedTasks} = groupTasksByCompletion(tasks);
+ const [taskGroupComplete, setTaskGroupComplete] = useLocalStorageState(
+ `quick-start:${organization.slug}:${group}-completed`,
+ false
+ );
+
+ useEffect(() => {
+ setIsExpanded(expanded);
+ }, [expanded]);
+
+ useEffect(() => {
+ if (completedTasks.length !== tasks.length || taskGroupComplete) {
+ return;
+ }
+
+ trackAnalytics('quick_start.task_group_completed', {
+ organization,
+ group,
+ });
+
+ setTaskGroupComplete(true);
+ }, [
+ group,
+ organization,
+ completedTasks,
+ tasks,
+ setTaskGroupComplete,
+ taskGroupComplete,
+ ]);
+
+ return (
+
+ setIsExpanded(!isExpanded) : undefined}
+ >
+ {toggleable && }
+
+
+ {title}
+ {incompletedTasks.length === 0 && (
+
+
+
+ )}
+
+
{description}
+
+ {toggleable && (
+
+ )}
+
+ {isExpanded && (
+
+
+
+
+ {tct('[totalCompletedTasks] out of [totalTasks] tasks completed', {
+ totalCompletedTasks: completedTasks.length,
+ totalTasks: tasks.length,
+ })}
+
+
+ {incompletedTasks.map(task => (
+
+ ))}
+ {completedTasks.length > 0 && (
+
+ {t('Completed')}
+ {completedTasks.map(task => (
+
+ ))}
+
+ )}
+
+
+ )}
+
+ );
+}
+
+interface NewSidebarProps
+ extends Pick,
+ Pick<
+ ReturnType,
+ 'gettingStartedTasks' | 'beyondBasicsTasks'
+ > {
+ onClose: () => void;
+}
+
+export function DeprecatedNewOnboardingSidebar({
+ onClose,
+ orientation,
+ collapsed,
+ gettingStartedTasks,
+ beyondBasicsTasks,
+}: NewSidebarProps) {
+ const walkthrough = isDemoModeEnabled();
+
+ const sortedGettingStartedTasks = gettingStartedTasks.sort(
+ (a, b) =>
+ orderedGettingStartedTasks.indexOf(a.task) -
+ orderedGettingStartedTasks.indexOf(b.task)
+ );
+
+ const sortedBeyondBasicsTasks = beyondBasicsTasks.sort(
+ (a, b) =>
+ orderedBeyondBasicsTasks.indexOf(a.task) - orderedBeyondBasicsTasks.indexOf(b.task)
+ );
+
+ const taskKeyForWaitingIndicator = useMemo(() => {
+ return [...sortedGettingStartedTasks, ...sortedBeyondBasicsTasks].find(
+ task => !taskIsDone(task) && !!task.SupplementComponent
+ )?.task;
+ }, [sortedGettingStartedTasks, sortedBeyondBasicsTasks]);
+
+ return (
+
+
+ 0
+ }
+ toggleable={sortedBeyondBasicsTasks.length > 0}
+ taskKeyForWaitingIndicator={taskKeyForWaitingIndicator}
+ group="getting_started"
+ />
+ {sortedBeyondBasicsTasks.length > 0 && (
+ 0
+ }
+ taskKeyForWaitingIndicator={taskKeyForWaitingIndicator}
+ group="beyond_basics"
+ />
+ )}
+
+
+
+ );
+}
+
+const Wrapper = styled(SidebarPanel)`
+ width: 100%;
+ @media (min-width: ${p => p.theme.breakpoints.xsmall}) {
+ width: 450px;
+ }
+`;
+
+const Content = styled('div')`
+ padding: ${space(3)};
+ display: flex;
+ flex-direction: column;
+ gap: ${space(1)};
+ flex: 1;
+
+ p {
+ margin-bottom: ${space(1)};
+ }
+`;
+
+const TaskGroupWrapper = styled('div')`
+ border: 1px solid ${p => p.theme.border};
+ border-radius: ${p => p.theme.borderRadius};
+ padding: ${space(1)};
+
+ hr {
+ border-color: ${p => p.theme.translucentBorder};
+ margin: ${space(1)} -${space(1)};
+ }
+`;
+
+const TaskGroupHeader = styled('div')<{toggleable?: boolean}>`
+ cursor: ${p => (p.onClick ? 'pointer' : 'default')};
+ display: grid;
+ grid-template-columns: 1fr max-content;
+ padding: ${space(1)} ${space(1.5)};
+ gap: ${space(1.5)};
+ position: relative;
+ border-radius: ${p => p.theme.borderRadius};
+ align-items: center;
+
+ p {
+ margin: 0;
+ font-size: ${p => p.theme.fontSizeSmall};
+ color: ${p => p.theme.subText};
+ }
+`;
+
+const TaskGroupTitle = styled('div')`
+ display: grid;
+ grid-template-columns: repeat(2, max-content);
+ align-items: center;
+ gap: ${space(1)};
+`;
+
+const TaskGroupBody = styled('div')`
+ border-radius: ${p => p.theme.borderRadius};
+`;
+
+const TaskGroupProgress = styled('div')<{completed?: boolean}>`
+ font-size: ${p => p.theme.fontSizeSmall};
+ font-weight: ${p => p.theme.fontWeightBold};
+ padding: ${space(0.75)} ${space(1.5)};
+ ${p =>
+ p.completed
+ ? css`
+ color: ${p.theme.green300};
+ `
+ : css`
+ color: ${p.theme.subText};
+ display: grid;
+ grid-template-columns: 1fr max-content;
+ align-items: center;
+ gap: ${space(1)};
+ `}
+`;
+
+const taskIncompleteCss = css`
+ position: relative;
+ cursor: pointer;
+ align-items: flex-start;
+`;
+
+const taskCompletedCss = css`
+ strong {
+ opacity: 0.5;
+ }
+ align-items: center;
+`;
+
+const TaskWrapper = styled(motion.li)`
+ padding: ${space(1)} ${space(1.5)};
+ border-radius: ${p => p.theme.borderRadius};
+ display: grid;
+ grid-template-columns: 1fr max-content;
+ gap: ${space(1)};
+
+ p {
+ margin: 0;
+ font-size: ${p => p.theme.fontSizeSmall};
+ color: ${p => p.theme.subText};
+ }
+`;
+
+TaskWrapper.defaultProps = {
+ layout: true,
+};
+
+const TaskActions = styled('div')`
+ display: flex;
+ flex-direction: column;
+ gap: ${space(1)};
+`;
+
+const BottomLeft = styled('img')`
+ width: 60%;
+ transform: rotate(180deg);
+ margin-top: ${space(3)};
+`;
diff --git a/static/app/components/onboardingWizard/filterSupportedTasks.spec.tsx b/static/app/components/onboardingWizard/filterSupportedTasks.spec.tsx
index 43d9130c000cd0..911f054ca5dedb 100644
--- a/static/app/components/onboardingWizard/filterSupportedTasks.spec.tsx
+++ b/static/app/components/onboardingWizard/filterSupportedTasks.spec.tsx
@@ -67,7 +67,7 @@ describe('filterSupportedTasks', function () {
[supportedProject, unsupportedProject],
onboardingTasks
);
- expect(supportedTasks.length).toBe(4);
+ expect(supportedTasks).toHaveLength(4);
});
it('filters out for unsupported platform', function () {
@@ -76,7 +76,7 @@ describe('filterSupportedTasks', function () {
firstTransactionEvent: false,
}) as Project & {platform: PlatformKey};
const supportedTasks = filterSupportedTasks([project], onboardingTasks);
- expect(supportedTasks.length).toBe(1);
+ expect(supportedTasks).toHaveLength(1);
});
it('filters out performance only if all projects are without support', function () {
@@ -97,7 +97,7 @@ describe('filterSupportedTasks', function () {
OnboardingTaskKey.SESSION_REPLAY,
OnboardingTaskKey.USER_REPORTS,
].includes(task.task)
- ).length
- ).toBe(3);
+ )
+ ).toHaveLength(3);
});
});
diff --git a/static/app/components/onboardingWizard/newSidebar.spec.tsx b/static/app/components/onboardingWizard/newSidebar.spec.tsx
new file mode 100644
index 00000000000000..937f13dfefdac0
--- /dev/null
+++ b/static/app/components/onboardingWizard/newSidebar.spec.tsx
@@ -0,0 +1,167 @@
+import {initializeOrg} from 'sentry-test/initializeOrg';
+import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary';
+
+import {NewOnboardingSidebar} from 'sentry/components/onboardingWizard/newSidebar';
+import {type OnboardingTask, OnboardingTaskKey} from 'sentry/types/onboarding';
+
+const gettingStartedTasks: OnboardingTask[] = [
+ {
+ task: OnboardingTaskKey.FIRST_PROJECT,
+ title: 'Create your first project',
+ description: 'Select your platform and install the Sentry SDK',
+ skippable: false,
+ actionType: 'app',
+ location: '',
+ display: true,
+ requisites: [],
+ requisiteTasks: [],
+ status: 'pending',
+ },
+ {
+ task: OnboardingTaskKey.FIRST_EVENT,
+ title: 'Send your first error',
+ description: 'Throw an error in your app',
+ skippable: false,
+ actionType: 'app',
+ location: '',
+ display: true,
+ requisites: [],
+ requisiteTasks: [],
+ status: 'pending',
+ },
+];
+
+const beyondBasicsTasks: OnboardingTask[] = [
+ {
+ task: OnboardingTaskKey.FIRST_TRANSACTION,
+ title: 'Setup Tracing',
+ description: 'Capture your first transaction',
+ skippable: true,
+ requisites: [],
+ actionType: 'app',
+ location: '',
+ display: true,
+ requisiteTasks: [],
+ status: 'pending',
+ },
+];
+
+describe('NewSidebar', function () {
+ it('should render the sidebar with the correct groups and tasks', async function () {
+ render(
+
+ );
+
+ // Group 1
+ expect(screen.getByText('Getting Started')).toBeInTheDocument();
+ expect(screen.getByText('0 out of 2 tasks completed')).toBeInTheDocument();
+ // This means that the group is expanded
+ expect(screen.getByRole('button', {name: 'Collapse'})).toBeInTheDocument();
+ expect(screen.getByText(gettingStartedTasks[0]!.title)).toBeInTheDocument();
+ expect(screen.getByText(gettingStartedTasks[0]!.description)).toBeInTheDocument();
+ expect(screen.queryByRole('button', {name: 'Skip Task'})).not.toBeInTheDocument();
+
+ // Group 2
+ expect(screen.getByText('Beyond the Basics')).toBeInTheDocument();
+ expect(screen.getByText('0 out of 1 task completed')).toBeInTheDocument();
+ // This means that the group is not expanded
+ expect(screen.queryByText(beyondBasicsTasks[0]!.title)).not.toBeInTheDocument();
+
+ // Manually expand second group
+ await userEvent.click(screen.getByRole('button', {name: 'Expand'}));
+ // Tasks from the second group should be visible
+ expect(await screen.findByText(beyondBasicsTasks[0]!.title)).toBeInTheDocument();
+ // tasks from the second group are skippable
+ expect(screen.getByRole('button', {name: 'Skip Task'})).toBeInTheDocument();
+ });
+
+ it('if first group completed, second group should be expanded by default', function () {
+ render(
+ ({
+ ...task,
+ status: 'complete',
+ }))}
+ beyondBasicsTasks={beyondBasicsTasks}
+ />
+ );
+
+ // Group 1
+ expect(screen.getByText('Getting Started')).toBeInTheDocument();
+ expect(screen.getByText('2 out of 2 tasks completed')).toBeInTheDocument();
+
+ // Group 2
+ // This means that the group is expanded
+ expect(screen.getByText(beyondBasicsTasks[0]!.title)).toBeInTheDocument();
+ });
+
+ it('show skipable confirmation when skipping a task', async function () {
+ const {organization} = initializeOrg();
+
+ const mockUpdate = MockApiClient.addMockResponse({
+ url: `/organizations/${organization.slug}/onboarding-tasks/`,
+ method: 'POST',
+ });
+
+ render(
+ ,
+ {
+ organization,
+ }
+ );
+
+ // Manually expand second group
+ await userEvent.click(screen.getByRole('button', {name: 'Expand'}));
+ // Tasks from the second group should be visible
+ expect(await screen.findByText(beyondBasicsTasks[0]!.title)).toBeInTheDocument();
+
+ await userEvent.click(screen.getByRole('button', {name: 'Skip Task'}));
+
+ // Confirmation to skip should be visible
+ expect(await screen.findByText(/Not sure what to do/)).toBeInTheDocument();
+ expect(screen.getByRole('button', {name: 'Just Skip'})).toBeInTheDocument();
+ expect(screen.getByRole('button', {name: 'Help'})).toBeInTheDocument();
+
+ // Click help
+ await userEvent.click(screen.getByRole('button', {name: 'Help'}));
+
+ // Show help menu
+ expect(await screen.findByText('Search Support, Docs and More')).toBeInTheDocument();
+ expect(screen.getByRole('link', {name: 'Contact Support'})).toBeInTheDocument();
+ expect(screen.getByRole('link', {name: 'Join our Discord'})).toBeInTheDocument();
+ expect(screen.getByRole('link', {name: 'Visit Help Center'})).toBeInTheDocument();
+
+ // Click 'Just Skip'
+ await userEvent.click(screen.getByRole('button', {name: 'Just Skip'}));
+ await waitFor(() => {
+ expect(mockUpdate).toHaveBeenCalledWith(
+ `/organizations/${organization.slug}/onboarding-tasks/`,
+ expect.objectContaining({
+ data: expect.objectContaining({
+ status: 'skipped',
+ task: OnboardingTaskKey.FIRST_TRANSACTION,
+ }),
+ })
+ );
+ });
+
+ // Dismiss skip confirmation
+ await userEvent.click(screen.getByRole('button', {name: 'Dismiss Skip'}));
+ expect(screen.queryByText(/Not sure what to do/)).not.toBeInTheDocument();
+ });
+});
diff --git a/static/app/components/onboardingWizard/newSidebar.tsx b/static/app/components/onboardingWizard/newSidebar.tsx
index 72fa11a44e48d9..719d245a30ed65 100644
--- a/static/app/components/onboardingWizard/newSidebar.tsx
+++ b/static/app/components/onboardingWizard/newSidebar.tsx
@@ -1,32 +1,37 @@
import {Fragment, useCallback, useEffect, useMemo, useState} from 'react';
-import {css} from '@emotion/react';
+import {css, useTheme} from '@emotion/react';
import styled from '@emotion/styled';
import {motion} from 'framer-motion';
import partition from 'lodash/partition';
import HighlightTopRight from 'sentry-images/pattern/highlight-top-right.svg';
+import {openHelpSearchModal} from 'sentry/actionCreators/modal';
import {navigateTo} from 'sentry/actionCreators/navigation';
import {updateOnboardingTask} from 'sentry/actionCreators/onboardingTasks';
import {Button} from 'sentry/components/button';
import {Chevron} from 'sentry/components/chevron';
+import {DropdownMenu} from 'sentry/components/dropdownMenu';
import InteractionStateLayer from 'sentry/components/interactionStateLayer';
-import SkipConfirm from 'sentry/components/onboardingWizard/skipConfirm';
import type {useOnboardingTasks} from 'sentry/components/onboardingWizard/useOnboardingTasks';
import {taskIsDone} from 'sentry/components/onboardingWizard/utils';
import ProgressRing from 'sentry/components/progressRing';
import SidebarPanel from 'sentry/components/sidebar/sidebarPanel';
import type {CommonSidebarProps} from 'sentry/components/sidebar/types';
import {Tooltip} from 'sentry/components/tooltip';
-import {IconCheckmark, IconClose, IconNot, IconSync} from 'sentry/icons';
+import {
+ IconCheckmark,
+ IconChevron,
+ IconClose,
+ IconNot,
+ IconSupport,
+ IconSync,
+} from 'sentry/icons';
import {t, tct} from 'sentry/locale';
+import ConfigStore from 'sentry/stores/configStore';
import DemoWalkthroughStore from 'sentry/stores/demoWalkthroughStore';
import {space} from 'sentry/styles/space';
-import {
- type OnboardingTask,
- OnboardingTaskKey,
- type OnboardingTaskStatus,
-} from 'sentry/types/onboarding';
+import {type OnboardingTask, OnboardingTaskKey} from 'sentry/types/onboarding';
import {trackAnalytics} from 'sentry/utils/analytics';
import {isDemoModeEnabled} from 'sentry/utils/demoMode';
import useApi from 'sentry/utils/useApi';
@@ -40,8 +45,8 @@ const orderedGettingStartedTasks = [
OnboardingTaskKey.INVITE_MEMBER,
OnboardingTaskKey.ALERT_RULE,
OnboardingTaskKey.SOURCEMAPS,
- OnboardingTaskKey.RELEASE_TRACKING,
OnboardingTaskKey.LINK_SENTRY_TO_SOURCE_CODE,
+ OnboardingTaskKey.RELEASE_TRACKING,
];
const orderedBeyondBasicsTasks = [
@@ -59,17 +64,193 @@ function groupTasksByCompletion(tasks: OnboardingTask[]) {
};
}
-interface TaskProps extends Pick {
+interface TaskCardProps {
+ icon: React.ReactNode;
+ title: React.ReactNode;
+ actions?: React.ReactNode;
+ className?: string;
+ description?: React.ReactNode;
+ onClick?: (e: React.MouseEvent) => void;
+}
+
+function TaskCard({
+ description,
+ icon,
+ title,
+ actions,
+ onClick,
+ className,
+}: TaskCardProps) {
+ return (
+
+ {onClick && }
+ {icon}
+
+ {title}
+ {description && {description}
}
+
+ {actions}
+
+ );
+}
+
+interface TaskStatusIconProps {
+ status: 'complete' | 'inProgress' | 'skipped' | 'pending';
+ progress?: number;
+ tooltipText?: string;
+}
+
+function TaskStatusIcon({status, tooltipText, progress}: TaskStatusIconProps) {
+ const theme = useTheme();
+
+ const progressValue = progress ?? 0;
+
+ return (
+
+ {status === 'complete' ? (
+
+ ) : status === 'skipped' ? (
+
+ ) : status === 'pending' ? (
+
+ ) : (
+
+ )}
+
+ );
+}
+
+interface SkipConfirmationProps {
+ onConfirm: () => void;
+ onDismiss: () => void;
+}
+
+function SkipConfirmation({onConfirm, onDismiss}: SkipConfirmationProps) {
+ const organization = useOrganization();
+ const theme = useTheme();
+
+ return (
+
+
+ }
+ actions={
+
+ }
+ onClick={event => {
+ event.stopPropagation();
+ onConfirm();
+ }}
+ />
+ ,
+ showChevron: false,
+ size: 'zero',
+ borderless: true,
+ }}
+ items={[
+ {
+ key: 'search',
+ label: t('Search Support, Docs and More'),
+ onAction() {
+ openHelpSearchModal({organization});
+ },
+ },
+ {
+ key: 'help',
+ label: t('Visit Help Center'),
+ // TODO(Telemetry): Make it open in a new tab
+ to: 'https://sentry.zendesk.com/hc/en-us',
+ },
+ {
+ key: 'discord',
+ label: t('Join our Discord'),
+ to: 'https://discord.com/invite/sentry',
+ },
+ {
+ key: 'support',
+ label: t('Contact Support'),
+ to: `mailto:${ConfigStore.get('supportEmail')}`,
+ },
+ ]}
+ />
+ }
+ onClick={event => {
+ event.stopPropagation();
+ onDismiss();
+ }}
+ />
+
+ }
+ />
+
+ );
+}
+
+interface TaskProps {
hidePanel: () => void;
task: OnboardingTask;
completed?: boolean;
showWaitingIndicator?: boolean;
}
-function Task({task, status, hidePanel, showWaitingIndicator}: TaskProps) {
+function Task({task, hidePanel, showWaitingIndicator}: TaskProps) {
const api = useApi();
const organization = useOrganization();
const router = useRouter();
+ const [showSkipConfirmation, setShowSkipConfirmation] = useState(false);
const handleClick = useCallback(
(e: React.MouseEvent) => {
@@ -127,79 +308,72 @@ function Task({task, status, hidePanel, showWaitingIndicator}: TaskProps) {
[task, organization, api]
);
- if (status === 'complete') {
- return (
-
- {task.title}
-
-
-
-
- );
- }
-
- if (status === 'skipped') {
- return (
-
- {task.title}
-
-
-
-
- );
- }
+ const iconTooltipText = useMemo(() => {
+ switch (task.status) {
+ case 'complete':
+ return t('Task completed');
+ case 'pending':
+ return task.pendingTitle ?? t('Task in progress\u2026');
+ case 'skipped':
+ return t('Task skipped');
+ default:
+ return undefined;
+ }
+ }, [task.status, task.pendingTitle]);
return (
-
-
-
-
{task.title}
-
{task.description}
-
- {task.requisiteTasks.length === 0 && (
-
- {task.skippable && (
- handleMarkSkipped(task.task)}>
- {({skip}) => (
- }
- onClick={skip}
- css={css`
- /* If the pulsing indicator is active, the close button
- * should be above it so it's clickable.
- */
- z-index: 1;
- `}
- />
- )}
-
- )}
- {task.SupplementComponent && showWaitingIndicator && (
-
- )}
- {status === 'pending' && (
-
-
-
- )}
-
+
+ }
+ description={task.description}
+ title={{task.title} }
+ actions={
+ task.status === 'complete' || task.status === 'skipped'
+ ? undefined
+ : task.requisiteTasks.length === 0 && (
+
+ {task.skippable && (
+ }
+ onClick={event => {
+ event.stopPropagation();
+ setShowSkipConfirmation(!showSkipConfirmation);
+ }}
+ css={css`
+ /* If the pulsing indicator is active, the close button
+ * should be above it so it's clickable.
+ */
+ z-index: 1;
+ `}
+ />
+ )}
+ {task.SupplementComponent && showWaitingIndicator && (
+
+ )}
+
+ )
+ }
+ />
+ {showSkipConfirmation && (
+ handleMarkSkipped(task.task)}
+ onDismiss={() => setShowSkipConfirmation(false)}
+ />
)}
);
}
interface TaskGroupProps {
- description: string;
/**
* Used for analytics
*/
@@ -214,7 +388,6 @@ interface TaskGroupProps {
function TaskGroup({
title,
- description,
tasks,
expanded,
hidePanel,
@@ -257,67 +430,51 @@ function TaskGroup({
return (
{title}}
+ description={
+ tasks.length > 1
+ ? tct('[totalCompletedTasks] out of [totalTasks] tasks completed', {
+ totalCompletedTasks: completedTasks.length,
+ totalTasks: tasks.length,
+ })
+ : tct('[totalCompletedTasks] out of [totalTasks] task completed', {
+ totalCompletedTasks: completedTasks.length,
+ totalTasks: tasks.length,
+ })
+ }
+ hasProgress={completedTasks.length > 0}
onClick={toggleable ? () => setIsExpanded(!isExpanded) : undefined}
- >
- {toggleable && }
-
-
- {title}
- {incompletedTasks.length === 0 && (
-
-
-
- )}
-
-
{description}
-
- {toggleable && (
-
- )}
-
+ }
+ actions={
+ }
+ aria-label={isExpanded ? t('Collapse') : t('Expand')}
+ aria-expanded={isExpanded}
+ size="zero"
+ borderless
+ />
+ }
+ />
{isExpanded && (
-
- {tct('[totalCompletedTasks] out of [totalTasks] tasks completed', {
- totalCompletedTasks: completedTasks.length,
- totalTasks: tasks.length,
- })}
-
-
{incompletedTasks.map(task => (
))}
- {completedTasks.length > 0 && (
-
- {t('Completed')}
- {completedTasks.map(task => (
-
- ))}
-
- )}
+ {completedTasks.map(task => (
+
+ ))}
)}
@@ -370,9 +527,6 @@ export function NewOnboardingSidebar({
0 && (
p.theme.breakpoints.xsmall}) {
- width: 450px;
+ width: 460px;
}
`;
@@ -435,91 +586,70 @@ const TaskGroupWrapper = styled('div')`
}
`;
-const TaskGroupHeader = styled('div')<{toggleable?: boolean}>`
- cursor: ${p => (p.onClick ? 'pointer' : 'default')};
- display: grid;
- grid-template-columns: 1fr max-content;
- padding: ${space(1)} ${space(1.5)};
- gap: ${space(1.5)};
- position: relative;
- border-radius: ${p => p.theme.borderRadius};
- align-items: center;
-
+const TaskGroupHeader = styled(TaskCard)<{hasProgress: boolean}>`
p {
- margin: 0;
- font-size: ${p => p.theme.fontSizeSmall};
- color: ${p => p.theme.subText};
+ color: ${p => (p.hasProgress ? p.theme.successText : p.theme.subText)};
}
`;
-const TaskGroupTitle = styled('div')`
- display: grid;
- grid-template-columns: repeat(2, max-content);
- align-items: center;
- gap: ${space(1)};
-`;
-
-const TaskGroupBody = styled('div')`
+const TaskGroupBody = styled(motion.ul)`
border-radius: ${p => p.theme.borderRadius};
+ list-style-type: none;
+ padding: 0;
+ margin: 0;
`;
-const TaskGroupProgress = styled('div')<{completed?: boolean}>`
- font-size: ${p => p.theme.fontSizeSmall};
- font-weight: ${p => p.theme.fontWeightBold};
- padding: ${space(0.75)} ${space(1.5)};
- ${p =>
- p.completed
- ? css`
- color: ${p.theme.green300};
- `
- : css`
- color: ${p.theme.subText};
- display: grid;
- grid-template-columns: 1fr max-content;
- align-items: center;
- gap: ${space(1)};
- `}
+const TaskWrapper = styled(motion.li)`
+ gap: ${space(1)};
`;
-const taskIncompleteCss = css`
- position: relative;
- cursor: pointer;
- align-items: flex-start;
+const TaskActions = styled('div')`
+ display: flex;
+ flex-direction: column;
+ align-items: center;
+ gap: ${space(1)};
`;
-const taskCompletedCss = css`
- strong {
- opacity: 0.5;
- }
- align-items: center;
+const BottomLeft = styled('img')`
+ width: 60%;
+ transform: rotate(180deg);
+ margin-top: ${space(3)};
`;
-const TaskWrapper = styled(motion.li)`
- padding: ${space(1)} ${space(1.5)};
- border-radius: ${p => p.theme.borderRadius};
+const TaskCardWrapper = styled('div')`
+ position: relative;
display: grid;
- grid-template-columns: 1fr max-content;
- gap: ${space(1)};
-
+ grid-template-columns: max-content 1fr max-content;
+ gap: ${space(1.5)};
+ cursor: ${p => (p.onClick ? 'pointer' : 'default')};
+ border-radius: ${p => p.theme.borderRadius};
+ padding: ${space(1)} ${space(1.5)};
p {
margin: 0;
font-size: ${p => p.theme.fontSizeSmall};
- color: ${p => p.theme.subText};
}
`;
-TaskWrapper.defaultProps = {
- layout: true,
-};
+const TaskCardDescription = styled('div')`
+ line-height: 20px;
+`;
-const TaskActions = styled('div')`
+const TaskCardIcon = styled('div')`
display: flex;
- flex-direction: column;
+ align-items: center;
+ height: 20px;
+`;
+
+const TaskCardActions = styled('div')`
+ display: grid;
+ grid-auto-flow: column;
+ grid-auto-columns: 20px;
gap: ${space(1)};
+ align-items: flex-start;
`;
-const BottomLeft = styled('img')`
- width: 60%;
- transform: rotate(180deg);
- margin-top: ${space(3)};
+const SkipConfirmationWrapper = styled('div')`
+ margin: ${space(1)} 0;
+ border: 1px solid ${p => p.theme.border};
+ border-radius: ${p => p.theme.borderRadius};
`;
diff --git a/static/app/components/onboardingWizard/skipConfirm.tsx b/static/app/components/onboardingWizard/skipConfirm.tsx
index 4820c9fe8c4b5a..bda36f8973d3df 100644
--- a/static/app/components/onboardingWizard/skipConfirm.tsx
+++ b/static/app/components/onboardingWizard/skipConfirm.tsx
@@ -1,4 +1,4 @@
-import {Component, Fragment} from 'react';
+import {Fragment, useState} from 'react';
import styled from '@emotion/styled';
import {Button, LinkButton} from 'sentry/components/button';
@@ -13,39 +13,30 @@ type Props = {
onSkip: () => void;
};
-type State = {
- showConfirmation: boolean;
-};
-
-class SkipConfirm extends Component {
- state: State = {
- showConfirmation: false,
- };
+function SkipConfirm(props: Props) {
+ const [showConfirmation, setShowConfirmation] = useState(false);
+ const {onSkip, children} = props;
- toggleConfirm = (e: React.MouseEvent) => {
+ const toggleConfirm = (e: React.MouseEvent) => {
e.stopPropagation();
- this.setState(state => ({showConfirmation: !state.showConfirmation}));
+ setShowConfirmation(!showConfirmation);
};
- handleSkip = (e: React.MouseEvent) => {
+ const handleSkip = (e: React.MouseEvent) => {
e.stopPropagation();
- this.props.onSkip();
+ onSkip();
};
- render() {
- const {children} = this.props;
-
- return (
-
- {children({skip: this.toggleConfirm})}
-
-
- );
- }
+ return (
+
+ {children({skip: toggleConfirm})}
+
+
+ );
}
export default SkipConfirm;
diff --git a/static/app/components/onboardingWizard/task.tsx b/static/app/components/onboardingWizard/task.tsx
index 76bb18b5ce06fa..21e3495b2e0b21 100644
--- a/static/app/components/onboardingWizard/task.tsx
+++ b/static/app/components/onboardingWizard/task.tsx
@@ -128,7 +128,7 @@ function Task(props: Props) {
diff --git a/static/app/components/onboardingWizard/taskConfig.tsx b/static/app/components/onboardingWizard/taskConfig.tsx
index 74e69ba9329289..1e62e7db20d483 100644
--- a/static/app/components/onboardingWizard/taskConfig.tsx
+++ b/static/app/components/onboardingWizard/taskConfig.tsx
@@ -7,6 +7,7 @@ import type {OnboardingContextProps} from 'sentry/components/onboarding/onboardi
import {filterSupportedTasks} from 'sentry/components/onboardingWizard/filterSupportedTasks';
import {
hasQuickStartUpdatesFeature,
+ hasQuickStartUpdatesFeatureGA,
taskIsDone,
} from 'sentry/components/onboardingWizard/utils';
import {filterProjects} from 'sentry/components/performanceOnboarding/utils';
@@ -57,7 +58,7 @@ function getIssueAlertUrl({projects, organization}: Options) {
}
// pick the first project with events if we have that, otherwise just pick the first project
const firstProjectWithEvents = projects.find(project => !!project.firstEvent);
- const project = firstProjectWithEvents ?? projects[0];
+ const project = firstProjectWithEvents ?? projects[0]!;
return `/organizations/${organization.slug}/alerts/${project.slug}/wizard/`;
}
@@ -80,7 +81,7 @@ function getOnboardingInstructionsUrl({projects, organization}: Options) {
const firstProjectWithoutError = projects.find(project => !project.firstEvent);
// If all projects contain errors, this step will not be visible to the user,
// but if the user falls into this case for some reason, we pick the first project
- const project = firstProjectWithoutError ?? projects[0];
+ const project = firstProjectWithoutError ?? projects[0]!;
let url = `/${organization.slug}/${project.slug}/getting-started/`;
@@ -99,7 +100,7 @@ function getMetricAlertUrl({projects, organization}: Options) {
const firstProjectWithEvents = projects.find(
project => !!project.firstTransactionEvent
);
- const project = firstProjectWithEvents ?? projects[0];
+ const project = firstProjectWithEvents ?? projects[0]!;
return {
pathname: `/organizations/${organization.slug}/alerts/${project.slug}/wizard/`,
query: {
@@ -218,6 +219,7 @@ export function getOnboardingTasks({
);
}
@@ -228,7 +230,7 @@ export function getOnboardingTasks({
!taskIsDone(task) && onCompleteTask?.()}
>
@@ -323,6 +325,7 @@ export function getOnboardingTasks({
);
},
@@ -369,14 +372,14 @@ export function getOnboardingTasks({
if (projectsForOnboarding.length) {
navigateTo(
- `${performanceUrl}?project=${projectsForOnboarding[0].id}#performance-sidequest`,
+ `${performanceUrl}?project=${projectsForOnboarding[0]!.id}#performance-sidequest`,
router
);
return;
}
navigateTo(
- `${performanceUrl}?project=${projectsWithoutFirstTransactionEvent[0].id}#performance-sidequest`,
+ `${performanceUrl}?project=${projectsWithoutFirstTransactionEvent[0]!.id}#performance-sidequest`,
router
);
},
@@ -391,7 +394,12 @@ export function getOnboardingTasks({
if (!projects?.length || task.requisiteTasks.length > 0 || taskIsDone(task)) {
return null;
}
- return ;
+ return (
+
+ );
}
return !!projects?.length &&
@@ -400,7 +408,7 @@ export function getOnboardingTasks({
!taskIsDone(task) && onCompleteTask?.()}
>
@@ -466,6 +474,7 @@ export function getOnboardingTasks({
);
}
@@ -476,7 +485,7 @@ export function getOnboardingTasks({
!taskIsDone(task) && onCompleteTask?.()}
>
@@ -578,32 +587,44 @@ export function getMergedTasks({organization, projects, onboardingContext}: Opti
}));
}
-const PulsingIndicator = styled('div')<{hasQuickStartUpdatesFeature?: boolean}>`
+const PulsingIndicator = styled('div')<{
+ hasQuickStartUpdatesFeature?: boolean;
+ hasQuickStartUpdatesFeatureGA?: boolean;
+}>`
${pulsingIndicatorStyles};
${p =>
- p.hasQuickStartUpdatesFeature
+ p.hasQuickStartUpdatesFeatureGA
? css`
- margin: 0 ${space(0.5)};
+ margin: 0;
`
- : css`
- margin-right: ${space(1)};
- `}
+ : p.hasQuickStartUpdatesFeature
+ ? css`
+ margin: 0 ${space(0.5)};
+ `
+ : css`
+ margin-right: ${space(1)};
+ `}
`;
const EventWaitingIndicator = styled(
({
hasQuickStartUpdatesFeature: quickStartUpdatesFeature,
+ hasQuickStartUpdatesFeatureGA: quickStartUpdatesFeatureGA,
text,
...p
}: React.HTMLAttributes & {
hasQuickStartUpdatesFeature?: boolean;
+ hasQuickStartUpdatesFeatureGA?: boolean;
text?: string;
}) => {
if (quickStartUpdatesFeature) {
return (
);
diff --git a/static/app/components/onboardingWizard/utils.tsx b/static/app/components/onboardingWizard/utils.tsx
index 1374b3fc828b03..7cae388afafbb3 100644
--- a/static/app/components/onboardingWizard/utils.tsx
+++ b/static/app/components/onboardingWizard/utils.tsx
@@ -16,3 +16,7 @@ export const findUpcomingTasks = (task: OnboardingTask) =>
export function hasQuickStartUpdatesFeature(organization: Organization) {
return organization.features?.includes('quick-start-updates');
}
+
+export function hasQuickStartUpdatesFeatureGA(organization: Organization) {
+ return organization.features?.includes('quick-start-updates-ga');
+}
diff --git a/static/app/components/organizations/environmentPageFilter/index.spec.tsx b/static/app/components/organizations/environmentPageFilter/index.spec.tsx
index 4d2d98fd6bf0b9..8d6c6363c55516 100644
--- a/static/app/components/organizations/environmentPageFilter/index.spec.tsx
+++ b/static/app/components/organizations/environmentPageFilter/index.spec.tsx
@@ -76,8 +76,8 @@ describe('EnvironmentPageFilter', function () {
await userEvent.click(screen.getByRole('button', {name: 'All Envs'}));
// Select prod & stage by clicking on their checkboxes
- await fireEvent.click(screen.getByRole('checkbox', {name: 'Select prod'}));
- await fireEvent.click(screen.getByRole('checkbox', {name: 'Select stage'}));
+ fireEvent.click(screen.getByRole('checkbox', {name: 'Select prod'}));
+ fireEvent.click(screen.getByRole('checkbox', {name: 'Select stage'}));
// Click "Apply"
await userEvent.click(screen.getByRole('button', {name: 'Apply'}));
diff --git a/static/app/components/organizations/environmentPageFilter/trigger.tsx b/static/app/components/organizations/environmentPageFilter/trigger.tsx
index 7146b4b5e9a937..b37703f0156056 100644
--- a/static/app/components/organizations/environmentPageFilter/trigger.tsx
+++ b/static/app/components/organizations/environmentPageFilter/trigger.tsx
@@ -27,7 +27,7 @@ function BaseEnvironmentPageFilterTrigger(
// Show 2 environments only if the combined string's length does not exceed 25.
// Otherwise show only 1 environment.
const envsToShow =
- value[0]?.length + value[1]?.length <= 23 ? value.slice(0, 2) : value.slice(0, 1);
+ value[0]!?.length + value[1]!?.length <= 23 ? value.slice(0, 2) : value.slice(0, 1);
// e.g. "production, staging"
const enumeratedLabel = envsToShow.map(env => trimSlug(env, 25)).join(', ');
diff --git a/static/app/components/organizations/headerItem.tsx b/static/app/components/organizations/headerItem.tsx
deleted file mode 100644
index dd048a13d0f332..00000000000000
--- a/static/app/components/organizations/headerItem.tsx
+++ /dev/null
@@ -1,201 +0,0 @@
-import {forwardRef} from 'react';
-import isPropValid from '@emotion/is-prop-valid';
-import type {Theme} from '@emotion/react';
-import styled from '@emotion/styled';
-import omit from 'lodash/omit';
-
-import Link from 'sentry/components/links/link';
-import {Tooltip} from 'sentry/components/tooltip';
-import {IconChevron, IconClose, IconInfo, IconLock, IconSettings} from 'sentry/icons';
-import {t} from 'sentry/locale';
-import {space} from 'sentry/styles/space';
-
-type DefaultProps = {
- allowClear: boolean;
-};
-
-type Props = {
- icon: React.ReactNode;
- forwardedRef?: React.Ref;
- hasChanges?: boolean;
- hasSelected?: boolean;
- hint?: string;
- isOpen?: boolean;
- loading?: boolean;
- locked?: boolean;
- lockedMessage?: React.ReactNode;
- onClear?: () => void;
- settingsLink?: string;
-} & Partial &
- React.HTMLAttributes;
-
-function HeaderItem({
- children,
- isOpen,
- hasSelected,
- icon,
- locked,
- lockedMessage,
- settingsLink,
- hint,
- loading,
- forwardedRef,
- onClear,
- allowClear = true,
- ...props
-}: Props) {
- const handleClear = (e: React.MouseEvent) => {
- e.stopPropagation();
- onClear?.();
- };
-
- const textColorProps = {
- locked,
- isOpen,
- hasSelected,
- };
-
- return (
-
- {icon}
-
- {children}
-
- {settingsLink && (
-
-
-
- )}
-
- {hint && (
-
-
-
-
-
- )}
- {hasSelected && !locked && allowClear && (
-
- )}
- {!locked && !loading && (
-
-
-
- )}
- {locked && (
-
-
-
- )}
-
- );
-}
-
-// Infer props here because of styled/theme
-const getColor = (p: ColorProps & {theme: Theme}) => {
- if (p.locked) {
- return p.theme.gray300;
- }
- return p.isOpen || p.hasSelected ? p.theme.textColor : p.theme.gray300;
-};
-
-type ColorProps = {
- hasSelected?: boolean;
- isOpen?: boolean;
- locked?: boolean;
-};
-
-const StyledHeaderItem = styled('div', {
- shouldForwardProp: p => typeof p === 'string' && isPropValid(p) && p !== 'loading',
-})<
- ColorProps & {
- loading: boolean;
- }
->`
- display: flex;
- padding: 0 ${space(4)};
- align-items: center;
- cursor: ${p => (p.loading ? 'progress' : p.locked ? 'text' : 'pointer')};
- color: ${getColor};
- transition: 0.1s color;
- user-select: none;
-`;
-
-const Content = styled('div')`
- display: flex;
- flex: 1;
- width: 0;
- white-space: nowrap;
- overflow: hidden;
- margin-right: ${space(1.5)};
-`;
-
-const StyledContent = styled('div')`
- overflow: hidden;
- text-overflow: ellipsis;
-`;
-
-const IconContainer = styled('span', {shouldForwardProp: isPropValid})`
- color: ${getColor};
- margin-right: ${space(1.5)};
- display: flex;
- font-size: ${p => p.theme.fontSizeMedium};
-`;
-
-const Hint = styled('div')`
- position: relative;
- top: ${space(0.25)};
- margin-right: ${space(1)};
-`;
-
-const StyledClose = styled(IconClose, {shouldForwardProp: isPropValid})`
- color: ${getColor};
- height: ${space(1.5)};
- width: ${space(1.5)};
- stroke-width: 1.5;
- padding: ${space(1)};
- box-sizing: content-box;
- margin: -${space(1)} 0px -${space(1)} -${space(1)};
-`;
-
-const ChevronWrapper = styled('div')`
- width: ${space(2)};
- height: ${space(2)};
- display: flex;
- align-items: center;
- justify-content: center;
-`;
-
-const StyledChevron = styled(IconChevron, {shouldForwardProp: isPropValid})<{
- isOpen: boolean;
-}>`
- color: ${getColor};
-`;
-
-const SettingsIconLink = styled(Link)`
- color: ${p => p.theme.gray300};
- align-items: center;
- display: inline-flex;
- justify-content: space-between;
- margin-right: ${space(1.5)};
- margin-left: ${space(1.0)};
- transition: 0.5s opacity ease-out;
-
- &:hover {
- color: ${p => p.theme.textColor};
- }
-`;
-
-const StyledLock = styled(IconLock)`
- margin-top: ${space(0.75)};
- stroke-width: 1.5;
-`;
-
-export default forwardRef((props, ref) => (
-
-));
diff --git a/static/app/components/organizations/hybridFilter.spec.tsx b/static/app/components/organizations/hybridFilter.spec.tsx
index 68661ecab69f9a..4bc239c5b28e24 100644
--- a/static/app/components/organizations/hybridFilter.spec.tsx
+++ b/static/app/components/organizations/hybridFilter.spec.tsx
@@ -63,8 +63,8 @@ describe('ProjectPageFilter', function () {
// Clicking on the checkboxes in Option One & Option Two _adds_ the options to the
// current selection state (multiple selection mode)
await userEvent.click(screen.getByRole('button', {expanded: false}));
- await fireEvent.click(screen.getByRole('checkbox', {name: 'Select Option One'}));
- await fireEvent.click(screen.getByRole('checkbox', {name: 'Select Option Two'}));
+ fireEvent.click(screen.getByRole('checkbox', {name: 'Select Option One'}));
+ fireEvent.click(screen.getByRole('checkbox', {name: 'Select Option Two'}));
expect(screen.getByRole('checkbox', {name: 'Select Option One'})).toBeChecked();
expect(screen.getByRole('checkbox', {name: 'Select Option Two'})).toBeChecked();
@@ -92,10 +92,10 @@ describe('ProjectPageFilter', function () {
which: 17,
ctrlKey: true,
};
- await fireEvent.keyDown(screen.getByRole('grid'), ctrlKeyOpts); // Press & hold Ctrl
+ fireEvent.keyDown(screen.getByRole('grid'), ctrlKeyOpts); // Press & hold Ctrl
await userEvent.click(screen.getByRole('row', {name: 'Option One'}));
- await fireEvent.click(screen.getByRole('row', {name: 'Option Two'}));
- await fireEvent.keyUp(screen.getByRole('grid'), ctrlKeyOpts); // Release Ctrl
+ fireEvent.click(screen.getByRole('row', {name: 'Option Two'}));
+ fireEvent.keyUp(screen.getByRole('grid'), ctrlKeyOpts); // Release Ctrl
expect(screen.getByRole('checkbox', {name: 'Select Option One'})).not.toBeChecked();
expect(screen.getByRole('checkbox', {name: 'Select Option Two'})).not.toBeChecked();
@@ -110,7 +110,7 @@ describe('ProjectPageFilter', function () {
// Open the menu, select Option One
await userEvent.click(screen.getByRole('button', {expanded: false}));
- await fireEvent.click(screen.getByRole('checkbox', {name: 'Select Option One'}));
+ fireEvent.click(screen.getByRole('checkbox', {name: 'Select Option One'}));
// Press Cancel
await userEvent.click(screen.getByRole('button', {name: 'Cancel'}));
@@ -140,7 +140,7 @@ describe('ProjectPageFilter', function () {
expect(screen.queryByRole('button', {name: 'Reset'})).not.toBeInTheDocument();
// Select Option Two, Reset button shows up
- await fireEvent.click(screen.getByRole('checkbox', {name: 'Select Option Two'}));
+ fireEvent.click(screen.getByRole('checkbox', {name: 'Select Option Two'}));
expect(screen.getByRole('checkbox', {name: 'Select Option Two'})).toBeChecked();
expect(screen.getByRole('button', {name: 'Reset'})).toBeInTheDocument();
@@ -181,7 +181,7 @@ describe('ProjectPageFilter', function () {
// focused to activate it. With RTL, however, onChange events aren't fired on Space
// key press (https://github.com/testing-library/react-testing-library/issues/122),
// so we'll have to simulate a click event instead.
- await fireEvent.click(screen.getByRole('checkbox', {name: 'Select Option One'}));
+ fireEvent.click(screen.getByRole('checkbox', {name: 'Select Option One'}));
expect(screen.getByRole('checkbox', {name: 'Select Option One'})).toBeChecked();
// Click "Apply" button, onChange is called
diff --git a/static/app/components/organizations/hybridFilter.tsx b/static/app/components/organizations/hybridFilter.tsx
index 82ac8be78b7aee..eb9f17e961dc58 100644
--- a/static/app/components/organizations/hybridFilter.tsx
+++ b/static/app/components/organizations/hybridFilter.tsx
@@ -324,12 +324,12 @@ export function HybridFilter({
// A modifier key is being pressed --> enter multiple selection mode
if (multiple && modifierKeyPressed) {
!modifierTipSeen && setModifierTipSeen(true);
- toggleOption(diff[0]);
+ toggleOption(diff[0]!);
return;
}
// Only one option was clicked on --> use single, direct selection mode
- onReplace?.(diff[0]);
+ onReplace?.(diff[0]!);
commit(diff);
},
[
diff --git a/static/app/components/organizations/pageFilters/container.spec.tsx b/static/app/components/organizations/pageFilters/container.spec.tsx
index 67b89b59c739d1..76a5e97525280d 100644
--- a/static/app/components/organizations/pageFilters/container.spec.tsx
+++ b/static/app/components/organizations/pageFilters/container.spec.tsx
@@ -225,9 +225,9 @@ describe('PageFiltersContainer', function () {
await waitFor(() => {
expect(globalActions.updateDateTime).not.toHaveBeenCalled();
- expect(globalActions.updateProjects).not.toHaveBeenCalled();
- expect(globalActions.updateEnvironments).not.toHaveBeenCalled();
});
+ expect(globalActions.updateProjects).not.toHaveBeenCalled();
+ expect(globalActions.updateEnvironments).not.toHaveBeenCalled();
expect(PageFiltersStore.getState()).toEqual({
isReady: true,
diff --git a/static/app/components/organizations/pageFilters/pageFilterDropdownButton.tsx b/static/app/components/organizations/pageFilters/pageFilterDropdownButton.tsx
deleted file mode 100644
index 305fd4887ce8b7..00000000000000
--- a/static/app/components/organizations/pageFilters/pageFilterDropdownButton.tsx
+++ /dev/null
@@ -1,29 +0,0 @@
-import styled from '@emotion/styled';
-
-import DropdownButton from 'sentry/components/dropdownButton';
-
-type Props = {
- /**
- * Highlights the button blue. For page filters this indicates the filter
- * has been desynced from the URL.
- */
- highlighted?: boolean;
-};
-
-const PageFilterDropdownButton = styled(DropdownButton)`
- width: 100%;
- text-overflow: ellipsis;
- ${p =>
- p.highlighted &&
- `
- &,
- &:active,
- &:hover,
- &:focus {
- background-color: ${p.theme.purple100};
- border-color: ${p.theme.purple200};
- }
- `}
-`;
-
-export default PageFilterDropdownButton;
diff --git a/static/app/components/organizations/pageFilters/pageFilterPinButton.tsx b/static/app/components/organizations/pageFilters/pageFilterPinButton.tsx
deleted file mode 100644
index f9212df92bdc77..00000000000000
--- a/static/app/components/organizations/pageFilters/pageFilterPinButton.tsx
+++ /dev/null
@@ -1,61 +0,0 @@
-import styled from '@emotion/styled';
-
-import {pinFilter} from 'sentry/actionCreators/pageFilters';
-import type {ButtonProps} from 'sentry/components/button';
-import {Button} from 'sentry/components/button';
-import {IconLock} from 'sentry/icons';
-import {t} from 'sentry/locale';
-import type {PinnedPageFilter} from 'sentry/types/core';
-import type {Organization} from 'sentry/types/organization';
-import {trackAnalytics} from 'sentry/utils/analytics';
-import usePageFilters from 'sentry/utils/usePageFilters';
-
-type Props = {
- filter: PinnedPageFilter;
- organization: Organization;
- size: Extract;
- className?: string;
-};
-
-function PageFilterPinButton({organization, filter, size, className}: Props) {
- const {pinnedFilters} = usePageFilters();
- const pinned = pinnedFilters.has(filter);
-
- const onPin = () => {
- trackAnalytics('page_filters.pin_click', {
- organization,
- filter,
- pin: !pinned,
- });
- pinFilter(filter, !pinned);
- };
-
- return (
- }
- title={t("Once locked, Sentry will remember this filter's value across pages.")}
- tooltipProps={{delay: 500}}
- >
- {pinned ? t('Locked') : t('Lock')}
-
- );
-}
-
-const PinButton = styled(Button)<{pinned: boolean; size: 'xs' | 'zero'}>`
- display: block;
- color: ${p => p.theme.textColor};
-
- :hover {
- color: ${p => p.theme.headingColor};
- }
- ${p => p.size === 'zero' && 'background: transparent'};
-`;
-
-export default PageFilterPinButton;
diff --git a/static/app/components/organizations/pageFilters/pageFilterPinIndicator.tsx b/static/app/components/organizations/pageFilters/pageFilterPinIndicator.tsx
deleted file mode 100644
index 87337e61998a61..00000000000000
--- a/static/app/components/organizations/pageFilters/pageFilterPinIndicator.tsx
+++ /dev/null
@@ -1,58 +0,0 @@
-import styled from '@emotion/styled';
-
-import {IconLock} from 'sentry/icons';
-import {t} from 'sentry/locale';
-import {space} from 'sentry/styles/space';
-import type {PinnedPageFilter} from 'sentry/types/core';
-import usePageFilters from 'sentry/utils/usePageFilters';
-
-type Props = {
- children: React.ReactNode;
- filter: PinnedPageFilter;
-};
-
-function PageFilterPinIndicator({children, filter}: Props) {
- const {pinnedFilters} = usePageFilters();
- const pinned = pinnedFilters.has(filter);
-
- return (
-
- {children}
- {pinned && (
-
-
-
- )}
-
- );
-}
-
-export default PageFilterPinIndicator;
-
-const Wrap = styled('div')`
- position: relative;
- display: flex;
- align-items: center;
- transform: translateX(-${space(0.25)});
-`;
-
-const IndicatorWrap = styled('div')`
- position: absolute;
- bottom: 0;
- right: 0;
- transform: translate(50%, 35%);
- border-radius: 50%;
- background-color: ${p => p.theme.background};
-
- padding: ${space(0.25)};
-
- display: flex;
- align-items: center;
- justify-content: center;
-`;
-
-const StyledIconLock = styled(IconLock)`
- width: 0.5rem;
- height: 0.5rem;
- color: ${p => p.theme.textColor};
-`;
diff --git a/static/app/components/organizations/pageFilters/parse.spec.tsx b/static/app/components/organizations/pageFilters/parse.spec.tsx
index b0574e813f49f4..051b0c26e2a5a3 100644
--- a/static/app/components/organizations/pageFilters/parse.spec.tsx
+++ b/static/app/components/organizations/pageFilters/parse.spec.tsx
@@ -207,9 +207,9 @@ describe('parseStatsPeriod', function () {
});
it('should return default statsPeriod if it is not provided or is invalid', function () {
- expect(parseStatsPeriod('invalid')).toEqual(undefined);
- expect(parseStatsPeriod('24f')).toEqual(undefined);
- expect(parseStatsPeriod('')).toEqual(undefined);
+ expect(parseStatsPeriod('invalid')).toBeUndefined();
+ expect(parseStatsPeriod('24f')).toBeUndefined();
+ expect(parseStatsPeriod('')).toBeUndefined();
expect(parseStatsPeriod('24')).toEqual({period: '24', periodLength: 's'});
});
diff --git a/static/app/components/organizations/pageFilters/utils.tsx b/static/app/components/organizations/pageFilters/utils.tsx
index d4abbf37a0f1c5..98f17cd9138eec 100644
--- a/static/app/components/organizations/pageFilters/utils.tsx
+++ b/static/app/components/organizations/pageFilters/utils.tsx
@@ -5,7 +5,7 @@ import pick from 'lodash/pick';
import pickBy from 'lodash/pickBy';
import {DEFAULT_STATS_PERIOD} from 'sentry/constants';
-import {DATE_TIME_KEYS, URL_PARAM} from 'sentry/constants/pageFilters';
+import {URL_PARAM} from 'sentry/constants/pageFilters';
import type {PageFilters} from 'sentry/types/core';
/**
@@ -35,13 +35,6 @@ export function extractSelectionParameters(query: Location['query']) {
return pickBy(pick(query, Object.values(URL_PARAM)), identity);
}
-/**
- * Extract the page filter datetime parameters from an object.
- */
-export function extractDatetimeSelectionParameters(query: Location['query']) {
- return pickBy(pick(query, Object.values(DATE_TIME_KEYS)), identity);
-}
-
/**
* Compare the non-utc values of two selections.
* Useful when re-fetching data based on page filters changing.
diff --git a/static/app/components/organizations/projectPageFilter/index.spec.tsx b/static/app/components/organizations/projectPageFilter/index.spec.tsx
index 4a116187aae2d6..2a2f37c3da2fd1 100644
--- a/static/app/components/organizations/projectPageFilter/index.spec.tsx
+++ b/static/app/components/organizations/projectPageFilter/index.spec.tsx
@@ -83,11 +83,11 @@ describe('ProjectPageFilter', function () {
await userEvent.click(screen.getByRole('button', {name: 'My Projects'}));
// Deselect project-1 & project-2 by clicking on their checkboxes
- await fireEvent.click(screen.getByRole('checkbox', {name: 'Select project-1'}));
- await fireEvent.click(screen.getByRole('checkbox', {name: 'Select project-2'}));
+ fireEvent.click(screen.getByRole('checkbox', {name: 'Select project-1'}));
+ fireEvent.click(screen.getByRole('checkbox', {name: 'Select project-2'}));
// Select project-3 by clicking on its checkbox
- await fireEvent.click(screen.getByRole('checkbox', {name: 'Select project-3'}));
+ fireEvent.click(screen.getByRole('checkbox', {name: 'Select project-3'}));
// Click "Apply"
await userEvent.click(screen.getByRole('button', {name: 'Apply'}));
diff --git a/static/app/components/organizations/projectPageFilter/index.tsx b/static/app/components/organizations/projectPageFilter/index.tsx
index 6917dd03b8dec6..393d75d57e1ec8 100644
--- a/static/app/components/organizations/projectPageFilter/index.tsx
+++ b/static/app/components/organizations/projectPageFilter/index.tsx
@@ -171,10 +171,10 @@ export function ProjectPageFilter({
if (!val.length) {
return allowMultiple
? memberProjects.map(p => parseInt(p.id, 10))
- : [parseInt(memberProjects[0]?.id, 10)];
+ : [parseInt(memberProjects[0]!?.id, 10)];
}
- return allowMultiple ? val : [val[0]];
+ return allowMultiple ? val : [val[0]!];
},
[memberProjects, allowMultiple]
);
diff --git a/static/app/components/organizations/projectPageFilter/trigger.tsx b/static/app/components/organizations/projectPageFilter/trigger.tsx
index 93b09ff1281862..64fdf90744d256 100644
--- a/static/app/components/organizations/projectPageFilter/trigger.tsx
+++ b/static/app/components/organizations/projectPageFilter/trigger.tsx
@@ -54,7 +54,7 @@ function BaseProjectPageFilterTrigger(
// Show 2 projects only if the combined string does not exceed maxTitleLength.
// Otherwise show only 1 project.
const projectsToShow =
- selectedProjects[0]?.slug?.length + selectedProjects[1]?.slug?.length <= 23
+ selectedProjects[0]!?.slug?.length + selectedProjects[1]!?.slug?.length <= 23
? selectedProjects.slice(0, 2)
: selectedProjects.slice(0, 1);
diff --git a/static/app/components/performance/searchBar.spec.tsx b/static/app/components/performance/searchBar.spec.tsx
index 61b618cf1f32c1..b3f772c23bd4b6 100644
--- a/static/app/components/performance/searchBar.spec.tsx
+++ b/static/app/components/performance/searchBar.spec.tsx
@@ -157,10 +157,11 @@ describe('SearchBar', () => {
render( );
- await userEvent.type(
- screen.getByRole('textbox'),
- 'GET /my-endpoint{ArrowDown}{Enter}'
- );
+ await userEvent.type(screen.getByRole('textbox'), 'GET /my-endpoint');
+
+ await screen.findByText('GET /my-endpoint');
+
+ await userEvent.keyboard('{ArrowDown}{Enter}');
expect(onSearch).toHaveBeenCalledTimes(1);
expect(onSearch).toHaveBeenCalledWith('transaction:"GET /my-endpoint"');
diff --git a/static/app/components/performance/searchBar.tsx b/static/app/components/performance/searchBar.tsx
index 70bec1c1122e12..79e16eedb018e1 100644
--- a/static/app/components/performance/searchBar.tsx
+++ b/static/app/components/performance/searchBar.tsx
@@ -2,25 +2,24 @@ import {useCallback, useRef, useState} from 'react';
import styled from '@emotion/styled';
import debounce from 'lodash/debounce';
+import {getSearchGroupWithItemMarkedActive} from 'sentry/components/deprecatedSmartSearchBar/utils';
import BaseSearchBar from 'sentry/components/searchBar';
-import {getSearchGroupWithItemMarkedActive} from 'sentry/components/smartSearchBar/utils';
import {DEFAULT_DEBOUNCE_DURATION} from 'sentry/constants';
import {t} from 'sentry/locale';
import type {Organization} from 'sentry/types/organization';
import {trackAnalytics} from 'sentry/utils/analytics';
-import {browserHistory} from 'sentry/utils/browserHistory';
import type EventView from 'sentry/utils/discover/eventView';
import {doDiscoverQuery} from 'sentry/utils/discover/genericDiscoverQuery';
import {parsePeriodToHours} from 'sentry/utils/duration/parsePeriodToHours';
import {MutableSearch} from 'sentry/utils/tokenizeSearch';
-import normalizeUrl from 'sentry/utils/url/normalizeUrl';
import useApi from 'sentry/utils/useApi';
+import {useNavigate} from 'sentry/utils/useNavigate';
import useOnClickOutside from 'sentry/utils/useOnClickOutside';
import {transactionSummaryRouteWithQuery} from 'sentry/views/performance/transactionSummary/utils';
-import SearchDropdown from '../smartSearchBar/searchDropdown';
-import type {SearchGroup} from '../smartSearchBar/types';
-import {ItemType} from '../smartSearchBar/types';
+import SearchDropdown from '../deprecatedSmartSearchBar/searchDropdown';
+import type {SearchGroup} from '../deprecatedSmartSearchBar/types';
+import {ItemType} from '../deprecatedSmartSearchBar/types';
const TRANSACTION_SEARCH_PERIOD = '14d';
@@ -45,6 +44,7 @@ function SearchBar(props: SearchBarProps) {
additionalConditions,
} = props;
+ const navigate = useNavigate();
const [searchResults, setSearchResults] = useState([]);
const transactionCount = searchResults[0]?.children?.length || 0;
const [highlightedItemIndex, setHighlightedItemIndex] = useState(-1);
@@ -97,12 +97,12 @@ function SearchBar(props: SearchBarProps) {
isDropdownOpen &&
transactionCount > 0
) {
- const currentHighlightedItem = searchResults[0].children[highlightedItemIndex];
+ const currentHighlightedItem = searchResults[0]!.children[highlightedItemIndex];
const nextHighlightedItemIndex =
(highlightedItemIndex + transactionCount + (key === 'ArrowUp' ? -1 : 1)) %
transactionCount;
setHighlightedItemIndex(nextHighlightedItemIndex);
- const nextHighlightedItem = searchResults[0].children[nextHighlightedItemIndex];
+ const nextHighlightedItem = searchResults[0]!.children[nextHighlightedItemIndex];
let newSearchResults = searchResults;
if (currentHighlightedItem) {
@@ -238,7 +238,7 @@ function SearchBar(props: SearchBarProps) {
query,
});
- browserHistory.push(normalizeUrl(next));
+ navigate(next);
};
const logDocsOpenedEvent = () => {
trackAnalytics('search.docs_opened', {
diff --git a/static/app/components/performance/spanSearchQueryBuilder.tsx b/static/app/components/performance/spanSearchQueryBuilder.tsx
index 8bef5f125bbad9..6eb88ce16ee0d4 100644
--- a/static/app/components/performance/spanSearchQueryBuilder.tsx
+++ b/static/app/components/performance/spanSearchQueryBuilder.tsx
@@ -167,6 +167,7 @@ export function EAPSpanSearchQueryBuilder({
stringTags,
getFilterTokenWarning,
supportedAggregates = [],
+ projects,
}: EAPSpanSearchQueryBuilderProps) {
const api = useApi();
const organization = useOrganization();
@@ -215,7 +216,7 @@ export function EAPSpanSearchQueryBuilder({
orgSlug: organization.slug,
fieldKey: tag.key,
search: queryString,
- projectIds: selection.projects.map(String),
+ projectIds: (projects ?? selection.projects).map(String),
endpointParams: normalizeDateTimeParams(selection.datetime),
dataset: 'spans',
});
@@ -224,7 +225,7 @@ export function EAPSpanSearchQueryBuilder({
throw new Error(`Unable to fetch event field values: ${e}`);
}
},
- [api, organization.slug, selection.projects, selection.datetime, numberTags]
+ [api, organization.slug, selection.projects, projects, selection.datetime, numberTags]
);
return (
diff --git a/static/app/components/performance/transactionSearchQueryBuilder.tsx b/static/app/components/performance/transactionSearchQueryBuilder.tsx
index 7de519a8021e72..47a795f7e30e46 100644
--- a/static/app/components/performance/transactionSearchQueryBuilder.tsx
+++ b/static/app/components/performance/transactionSearchQueryBuilder.tsx
@@ -34,7 +34,7 @@ interface TransactionSearchQueryBuilderProps {
filterKeyMenuWidth?: number;
onSearch?: (query: string, state: CallbackSearchState) => void;
placeholder?: string;
- projects?: PageFilters['projects'];
+ projects?: PageFilters['projects'] | Readonly;
trailingItems?: React.ReactNode;
}
diff --git a/static/app/components/performance/waterfall/rowDivider.tsx b/static/app/components/performance/waterfall/rowDivider.tsx
index 9e807274f43f74..7b94abd1d0f200 100644
--- a/static/app/components/performance/waterfall/rowDivider.tsx
+++ b/static/app/components/performance/waterfall/rowDivider.tsx
@@ -1,6 +1,6 @@
import styled from '@emotion/styled';
-import {IconAdd, IconFire, IconGraph, IconProfiling, IconSubtract} from 'sentry/icons';
+import {IconAdd, IconFire, IconProfiling, IconSubtract} from 'sentry/icons';
import {space} from 'sentry/styles/space';
import type {Aliases, Color} from 'sentry/utils/theme';
@@ -75,14 +75,6 @@ export function ErrorBadge() {
);
}
-export function MetricsBadge() {
- return (
-
-
-
- );
-}
-
export function EmbeddedTransactionBadge({
inTraceView = false,
expanded,
diff --git a/static/app/components/performance/waterfall/utils.spec.tsx b/static/app/components/performance/waterfall/utils.spec.tsx
index c284110f42a07c..e4664d79369cfd 100644
--- a/static/app/components/performance/waterfall/utils.spec.tsx
+++ b/static/app/components/performance/waterfall/utils.spec.tsx
@@ -16,7 +16,7 @@ describe('pickBarColor()', function () {
});
it('returns a random color when no predefined option is available', function () {
- const colorsAsArray = Object.keys(CHART_PALETTE).map(key => CHART_PALETTE[17][key]);
+ const colorsAsArray = Object.keys(CHART_PALETTE).map(key => CHART_PALETTE[17]![key]);
let randomColor = pickBarColor('a normal string');
expect(colorsAsArray).toContain(randomColor);
diff --git a/static/app/components/performance/waterfall/utils.tsx b/static/app/components/performance/waterfall/utils.tsx
index 63b7fed34ff53c..3589403bf789e5 100644
--- a/static/app/components/performance/waterfall/utils.tsx
+++ b/static/app/components/performance/waterfall/utils.tsx
@@ -238,13 +238,13 @@ const getLetterIndex = (letter: string): number => {
return index === -1 ? 0 : index;
};
-const colorsAsArray = Object.keys(CHART_PALETTE).map(key => CHART_PALETTE[17][key]);
+const colorsAsArray = Object.keys(CHART_PALETTE).map(key => CHART_PALETTE[17]![key]);
export const barColors = {
- default: CHART_PALETTE[17][4],
- transaction: CHART_PALETTE[17][8],
- http: CHART_PALETTE[17][10],
- db: CHART_PALETTE[17][17],
+ default: CHART_PALETTE[17]![4],
+ transaction: CHART_PALETTE[17]![8],
+ http: CHART_PALETTE[17]![10],
+ db: CHART_PALETTE[17]![17],
};
export const pickBarColor = (input: string | undefined): string => {
@@ -252,17 +252,17 @@ export const pickBarColor = (input: string | undefined): string => {
// That way colors stay consistent between transactions.
if (!input || input.length < 3) {
- return CHART_PALETTE[17][4];
+ return CHART_PALETTE[17]![4]!;
}
if (barColors[input]) {
return barColors[input];
}
- const letterIndex1 = getLetterIndex(input[0]);
- const letterIndex2 = getLetterIndex(input[1]);
- const letterIndex3 = getLetterIndex(input[2]);
- const letterIndex4 = getLetterIndex(input[3]);
+ const letterIndex1 = getLetterIndex(input[0]!);
+ const letterIndex2 = getLetterIndex(input[1]!);
+ const letterIndex3 = getLetterIndex(input[2]!);
+ const letterIndex4 = getLetterIndex(input[3]!);
return colorsAsArray[
(letterIndex1 + letterIndex2 + letterIndex3 + letterIndex4) % colorsAsArray.length
diff --git a/static/app/components/performanceOnboarding/sidebar.tsx b/static/app/components/performanceOnboarding/sidebar.tsx
index d5e5996aa394f4..23a88d7fe47b63 100644
--- a/static/app/components/performanceOnboarding/sidebar.tsx
+++ b/static/app/components/performanceOnboarding/sidebar.tsx
@@ -85,7 +85,7 @@ function PerformanceOnboardingSidebar(props: CommonSidebarProps) {
const priorityProjects: Project[] = [];
priorityProjectIds.forEach(projectId => {
- priorityProjects.push(projectMap[String(projectId)]);
+ priorityProjects.push(projectMap[String(projectId)]!);
});
// Among the project selection, find a project that has performance onboarding docs support, and has not sent
@@ -269,7 +269,7 @@ function OnboardingContent({currentProject}: {currentProject: Project}) {
api,
projectKeyId,
dsn,
- organization: organization,
+ organization,
platformKey: currentProject.platform || 'other',
projectId: currentProject.id,
projectSlug: currentProject.slug,
diff --git a/static/app/components/pickProjectToContinue.tsx b/static/app/components/pickProjectToContinue.tsx
index b15d0fb082a4f1..b04309d63a6871 100644
--- a/static/app/components/pickProjectToContinue.tsx
+++ b/static/app/components/pickProjectToContinue.tsx
@@ -49,7 +49,7 @@ function PickProjectToContinue({
// if the project in URL is missing, but this release belongs to only one project, redirect there
if (projects.length === 1) {
- router.replace(path + projects[0].id);
+ router.replace(path + projects[0]!.id);
return null;
}
diff --git a/static/app/components/platformPicker.spec.tsx b/static/app/components/platformPicker.spec.tsx
index a0a31b356c4904..6e127f16ce96c8 100644
--- a/static/app/components/platformPicker.spec.tsx
+++ b/static/app/components/platformPicker.spec.tsx
@@ -104,7 +104,7 @@ describe('PlatformPicker', function () {
const platformNames = screen.getAllByRole('heading', {level: 3});
platformNames.forEach((platform, index) => {
- expect(platform).toHaveTextContent(alphabeticallyOrderedPlatformNames[index]);
+ expect(platform).toHaveTextContent(alphabeticallyOrderedPlatformNames[index]!);
});
});
diff --git a/static/app/components/platformPicker.tsx b/static/app/components/platformPicker.tsx
index edee2eda9fe73c..c1bba265ab8e1d 100644
--- a/static/app/components/platformPicker.tsx
+++ b/static/app/components/platformPicker.tsx
@@ -72,8 +72,8 @@ class PlatformPicker extends Component {
};
state: State = {
- category: this.props.defaultCategory ?? categoryList[0].id,
- filter: this.props.noAutoFilter ? '' : (this.props.platform || '').split('-')[0],
+ category: this.props.defaultCategory ?? categoryList[0]!.id,
+ filter: this.props.noAutoFilter ? '' : (this.props.platform || '').split('-')[0]!,
};
get platformList() {
diff --git a/static/app/components/profiling/flamegraph/aggregateFlamegraph.tsx b/static/app/components/profiling/flamegraph/aggregateFlamegraph.tsx
index c7ab857c9ac7ea..95b293a55984cd 100644
--- a/static/app/components/profiling/flamegraph/aggregateFlamegraph.tsx
+++ b/static/app/components/profiling/flamegraph/aggregateFlamegraph.tsx
@@ -85,7 +85,7 @@ export function AggregateFlamegraph(props: AggregateFlamegraphProps): ReactEleme
},
// We skip position.view dependency because it will go into an infinite loop
- // eslint-disable-next-line react-hooks/exhaustive-deps
+
[flamegraph, flamegraphCanvas, flamegraphTheme]
);
diff --git a/static/app/components/profiling/flamegraph/continuousFlamegraph.tsx b/static/app/components/profiling/flamegraph/continuousFlamegraph.tsx
index cbe1d3931fe460..a55ddbacc11dcf 100644
--- a/static/app/components/profiling/flamegraph/continuousFlamegraph.tsx
+++ b/static/app/components/profiling/flamegraph/continuousFlamegraph.tsx
@@ -165,11 +165,11 @@ function convertContinuousProfileMeasurementsToUIFrames(
};
for (let i = 0; i < measurement.values.length; i++) {
- const value = measurement.values[i];
+ const value = measurement.values[i]!;
const next = measurement.values[i + 1] ?? value;
measurements.values.push({
- elapsed: next.timestamp - value.timestamp,
+ elapsed: next!.timestamp - value.timestamp,
value: value.value,
});
}
@@ -207,7 +207,7 @@ function findLongestMatchingFrame(
}
for (let i = 0; i < frame.children.length; i++) {
- frames.push(frame.children[i]);
+ frames.push(frame.children[i]!);
}
}
@@ -425,8 +425,8 @@ export function ContinuousFlamegraph(): ReactElement {
let offset = 0;
for (let i = 0; i < measurements.values.length; i++) {
- const value = measurements.values[i];
- const next = measurements.values[i + 1] ?? value;
+ const value = measurements.values[i]!;
+ const next = measurements.values[i + 1] ?? value;
offset += (next.timestamp - value.timestamp) * 1e3;
values.push({
@@ -467,8 +467,8 @@ export function ContinuousFlamegraph(): ReactElement {
let offset = 0;
for (let i = 0; i < measurements.values.length; i++) {
- const value = measurements.values[i];
- const next = measurements.values[i + 1] ?? value;
+ const value = measurements.values[i]!;
+ const next = measurements.values[i + 1] ?? value;
offset += (next.timestamp - value.timestamp) * 1e3;
values.push({
@@ -502,8 +502,8 @@ export function ContinuousFlamegraph(): ReactElement {
let offset = 0;
for (let i = 0; i < memory_footprint.values.length; i++) {
- const value = memory_footprint.values[i];
- const next = memory_footprint.values[i + 1] ?? value;
+ const value = memory_footprint.values[i]!;
+ const next = memory_footprint.values[i + 1] ?? value;
offset += (next.timestamp - value.timestamp) * 1e3;
values.push({
@@ -525,8 +525,8 @@ export function ContinuousFlamegraph(): ReactElement {
let offset = 0;
for (let i = 0; i < native_memory_footprint.values.length; i++) {
- const value = native_memory_footprint.values[i];
- const next = native_memory_footprint.values[i + 1] ?? value;
+ const value = native_memory_footprint.values[i]!;
+ const next = native_memory_footprint.values[i + 1] ?? value;
offset += (next.timestamp - value.timestamp) * 1e3;
values.push({
diff --git a/static/app/components/profiling/flamegraph/deprecatedAggregateFlamegraph.tsx b/static/app/components/profiling/flamegraph/deprecatedAggregateFlamegraph.tsx
index f90210f1760126..2cde654bab7769 100644
--- a/static/app/components/profiling/flamegraph/deprecatedAggregateFlamegraph.tsx
+++ b/static/app/components/profiling/flamegraph/deprecatedAggregateFlamegraph.tsx
@@ -148,7 +148,7 @@ export function DeprecatedAggregateFlamegraph(
},
// We skip position.view dependency because it will go into an infinite loop
- // eslint-disable-next-line react-hooks/exhaustive-deps
+
[flamegraph, flamegraphCanvas, flamegraphTheme]
);
@@ -172,7 +172,6 @@ export function DeprecatedAggregateFlamegraph(
});
// We skip `flamegraphCanvas` as it causes an infinite loop
- // eslint-disable-next-line react-hooks/exhaustive-deps
}, [flamegraph, setFlamegraphThemeMutation, flamegraphCanvas?.logicalSpace.height]);
// Uses a useLayoutEffect to ensure that these top level/global listeners are added before
diff --git a/static/app/components/profiling/flamegraph/differentialFlamegraph.tsx b/static/app/components/profiling/flamegraph/differentialFlamegraph.tsx
index d506850fdfb949..a60297eb7f1ad0 100644
--- a/static/app/components/profiling/flamegraph/differentialFlamegraph.tsx
+++ b/static/app/components/profiling/flamegraph/differentialFlamegraph.tsx
@@ -72,7 +72,7 @@ export function DifferentialFlamegraph(props: DifferentialFlamegraphProps): Reac
},
// We skip position.view dependency because it will go into an infinite loop
- // eslint-disable-next-line react-hooks/exhaustive-deps
+
[props.differentialFlamegraph, flamegraphCanvas, flamegraphTheme]
);
diff --git a/static/app/components/profiling/flamegraph/flamegraph.spec.tsx b/static/app/components/profiling/flamegraph/flamegraph.spec.tsx
index e47d2700716ef6..0425bfb85616d7 100644
--- a/static/app/components/profiling/flamegraph/flamegraph.spec.tsx
+++ b/static/app/components/profiling/flamegraph/flamegraph.spec.tsx
@@ -1,13 +1,7 @@
import {ProjectFixture} from 'sentry-fixture/project';
import {initializeOrg} from 'sentry-test/initializeOrg';
-import {
- act,
- findAllByTestId,
- render,
- screen,
- userEvent,
-} from 'sentry-test/reactTestingLibrary';
+import {act, render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
import ProjectsStore from 'sentry/stores/projectsStore';
import {useParams} from 'sentry/utils/useParams';
@@ -165,12 +159,12 @@ describe('Flamegraph', function () {
{organization: initializeOrg().organization}
);
- const frames = await findAllByTestId(document.body, 'flamegraph-frame', undefined, {
+ const frames = await screen.findAllByTestId('flamegraph-frame', undefined, {
timeout: 5000,
});
// 1 for main view and 1 for minimap
- expect(frames.length).toBe(2);
+ expect(frames).toHaveLength(2);
});
it('reads preferences from qs', async function () {
diff --git a/static/app/components/profiling/flamegraph/flamegraph.tsx b/static/app/components/profiling/flamegraph/flamegraph.tsx
index a02ce26d2a4fd4..3e35201a064573 100644
--- a/static/app/components/profiling/flamegraph/flamegraph.tsx
+++ b/static/app/components/profiling/flamegraph/flamegraph.tsx
@@ -132,7 +132,7 @@ function convertProfileMeasurementsToUIFrames(
};
for (let i = 0; i < measurement.values.length; i++) {
- const value = measurement.values[i];
+ const value = measurement.values[i]!;
measurements.values.push({
elapsed: value.elapsed_since_start_ns,
@@ -173,7 +173,7 @@ function findLongestMatchingFrame(
}
for (let i = 0; i < frame.children.length; i++) {
- frames.push(frame.children[i]);
+ frames.push(frame.children[i]!);
}
}
@@ -449,7 +449,7 @@ function Flamegraph(): ReactElement {
const values: ProfileSeriesMeasurement['values'] = [];
for (let i = 0; i < measurements.values.length; i++) {
- const value = measurements.values[i];
+ const value = measurements.values[i]!;
values.push({
value: value.value,
elapsed: value.elapsed_since_start_ns,
@@ -478,7 +478,7 @@ function Flamegraph(): ReactElement {
const values: ProfileSeriesMeasurement['values'] = [];
for (let i = 0; i < memory_footprint.values.length; i++) {
- const value = memory_footprint.values[i];
+ const value = memory_footprint.values[i]!;
values.push({
value: value.value,
elapsed: value.elapsed_since_start_ns,
@@ -497,7 +497,7 @@ function Flamegraph(): ReactElement {
const values: ProfileSeriesMeasurement['values'] = [];
for (let i = 0; i < native_memory_footprint.values.length; i++) {
- const value = native_memory_footprint.values[i];
+ const value = native_memory_footprint.values[i]!;
values.push({
value: value.value,
elapsed: value.elapsed_since_start_ns,
diff --git a/static/app/components/profiling/flamegraph/flamegraphChartTooltip.tsx b/static/app/components/profiling/flamegraph/flamegraphChartTooltip.tsx
index 02ac3aadbec950..328d1202c37539 100644
--- a/static/app/components/profiling/flamegraph/flamegraphChartTooltip.tsx
+++ b/static/app/components/profiling/flamegraph/flamegraphChartTooltip.tsx
@@ -59,7 +59,7 @@ export function FlamegraphChartTooltip({
/>
{p.name}:
- {chart.tooltipFormatter(p.points[0].y)}
+ {chart.tooltipFormatter(p.points[0]!.y)}
diff --git a/static/app/components/profiling/flamegraph/flamegraphDrawer/flamegraphDrawer.tsx b/static/app/components/profiling/flamegraph/flamegraphDrawer/flamegraphDrawer.tsx
index 75aad0f8a04977..00e15b320e4034 100644
--- a/static/app/components/profiling/flamegraph/flamegraphDrawer/flamegraphDrawer.tsx
+++ b/static/app/components/profiling/flamegraph/flamegraphDrawer/flamegraphDrawer.tsx
@@ -264,7 +264,7 @@ const FlamegraphDrawer = memo(function FlamegraphDrawer(props: FlamegraphDrawerP
? props.profileTransaction.data
: null
}
- projectId={params.projectId}
+ projectId={params.projectId!}
profileGroup={props.profileGroup}
/>
) : null}
diff --git a/static/app/components/profiling/flamegraph/flamegraphOverlays/profileDragDropImport.tsx b/static/app/components/profiling/flamegraph/flamegraphOverlays/profileDragDropImport.tsx
index e7495f1be38133..fd7fe5745e52d9 100644
--- a/static/app/components/profiling/flamegraph/flamegraphOverlays/profileDragDropImport.tsx
+++ b/static/app/components/profiling/flamegraph/flamegraphOverlays/profileDragDropImport.tsx
@@ -25,7 +25,7 @@ function ProfileDragDropImport({
evt.preventDefault();
evt.stopPropagation();
- const file = evt.dataTransfer.items[0].getAsFile();
+ const file = evt.dataTransfer.items[0]!.getAsFile();
if (file) {
setDropState('processing');
diff --git a/static/app/components/profiling/flamegraph/flamegraphPreview.spec.tsx b/static/app/components/profiling/flamegraph/flamegraphPreview.spec.tsx
index 78daa1ca754081..83869f36d06a68 100644
--- a/static/app/components/profiling/flamegraph/flamegraphPreview.spec.tsx
+++ b/static/app/components/profiling/flamegraph/flamegraphPreview.spec.tsx
@@ -41,7 +41,7 @@ describe('computePreviewConfigView', function () {
// y is 0 here because the config view is taller than the flamegraph
expect(previewConfigView).toEqual(new Rect(0, 0, 2, 3));
- expect(mode).toEqual('anchorTop');
+ expect(mode).toBe('anchorTop');
});
it('uses max depth', function () {
@@ -81,7 +81,7 @@ describe('computePreviewConfigView', function () {
// y is 1 here because the config view has height 2 so it can only
// show 2 frames and we show the inner most frames
expect(previewConfigView).toEqual(new Rect(0, 1, 2, 2));
- expect(mode).toEqual('anchorBottom');
+ expect(mode).toBe('anchorBottom');
});
it('uses max depth in window', function () {
@@ -122,7 +122,7 @@ describe('computePreviewConfigView', function () {
// y is 1 here because the config view has height 2 so it can only
// show 2 frames and we show the inner most frames
expect(previewConfigView).toEqual(new Rect(1, 1, 2, 2));
- expect(mode).toEqual('anchorBottom');
+ expect(mode).toBe('anchorBottom');
});
it('uses 0 when view is taller than profile', function () {
@@ -161,7 +161,7 @@ describe('computePreviewConfigView', function () {
// y is 0 here because the config view has height 3
// so the whole flamechart fits
expect(previewConfigView).toEqual(new Rect(0, 0, 2, 3));
- expect(mode).toEqual('anchorTop');
+ expect(mode).toBe('anchorTop');
});
it('uses parent frame depth', function () {
@@ -199,7 +199,7 @@ describe('computePreviewConfigView', function () {
// y is 1 here because we found a frame `f1` that is wraps
// around the window at depth 1
expect(previewConfigView).toEqual(new Rect(1, 1, 2, 2));
- expect(mode).toEqual('anchorTop');
+ expect(mode).toBe('anchorTop');
});
it('uses max depth because there is room above parent to show more', function () {
@@ -238,6 +238,6 @@ describe('computePreviewConfigView', function () {
// so the whole flamechart fits even though the parent
// is at depth 1
expect(previewConfigView).toEqual(new Rect(1, 0, 2, 3));
- expect(mode).toEqual('anchorTop');
+ expect(mode).toBe('anchorTop');
});
});
diff --git a/static/app/components/profiling/flamegraph/flamegraphPreview.tsx b/static/app/components/profiling/flamegraph/flamegraphPreview.tsx
index e38af70780bf2d..5bcb5c362ef468 100644
--- a/static/app/components/profiling/flamegraph/flamegraphPreview.tsx
+++ b/static/app/components/profiling/flamegraph/flamegraphPreview.tsx
@@ -332,7 +332,7 @@ export function computePreviewConfigView(
}
for (let i = 0; i < frame.children.length; i++) {
- frames.push(frame.children[i]);
+ frames.push(frame.children[i]!);
}
}
diff --git a/static/app/components/profiling/flamegraph/flamegraphToolbar/flamegraphSearch.tsx b/static/app/components/profiling/flamegraph/flamegraphToolbar/flamegraphSearch.tsx
index f9571c828f8f06..b6322a43ac6e2a 100644
--- a/static/app/components/profiling/flamegraph/flamegraphToolbar/flamegraphSearch.tsx
+++ b/static/app/components/profiling/flamegraph/flamegraphToolbar/flamegraphSearch.tsx
@@ -171,7 +171,7 @@ function yieldingRafFrameSearch(
const searchFramesFunction = isRegExpSearch ? searchFrameRegExp : searchFrameFzf;
const searchSpansFunction = isRegExpSearch ? searchSpanRegExp : searchSpanFzf;
- const searchQuery = isRegExpSearch ? lookup : lowercaseQuery;
+ const searchQuery = isRegExpSearch ? lookup! : lowercaseQuery;
function searchFramesAndSpans() {
const start = performance.now();
diff --git a/static/app/components/profiling/profileHeader.tsx b/static/app/components/profiling/profileHeader.tsx
index 3783436280c725..f72dfe65126385 100644
--- a/static/app/components/profiling/profileHeader.tsx
+++ b/static/app/components/profiling/profileHeader.tsx
@@ -51,7 +51,7 @@ function ProfileHeader({transaction, projectId, eventId}: ProfileHeaderProps) {
traceSlug: transaction.contexts?.trace?.trace_id ?? '',
location,
organization,
- transactionName: transactionName,
+ transactionName,
})
: null;
diff --git a/static/app/components/profiling/suspectFunctions/suspectFunctionsTable.tsx b/static/app/components/profiling/suspectFunctions/suspectFunctionsTable.tsx
index c12594b5584e7f..55e6c60b73ded5 100644
--- a/static/app/components/profiling/suspectFunctions/suspectFunctionsTable.tsx
+++ b/static/app/components/profiling/suspectFunctions/suspectFunctionsTable.tsx
@@ -1,4 +1,4 @@
-import {Fragment, useCallback, useMemo, useState} from 'react';
+import {Fragment, useCallback, useMemo, useRef, useState} from 'react';
import styled from '@emotion/styled';
import clamp from 'lodash/clamp';
@@ -110,9 +110,9 @@ export function SuspectFunctionsTable({
return sortedMetrics.slice(pagination.start, pagination.end);
}, [sortedMetrics, pagination]);
- const {tableStyles} = useTableStyles({
- items: COLUMNS,
- });
+ const fields = COLUMNS.map(column => column.value);
+ const tableRef = useRef(null);
+ const {initialTableStyles} = useTableStyles(fields, tableRef);
const baggage: RenderFunctionBaggage = {
location,
@@ -139,7 +139,7 @@ export function SuspectFunctionsTable({
/>
-
+
{COLUMNS.map((column, i) => {
diff --git a/static/app/components/projects/bookmarkStar.spec.tsx b/static/app/components/projects/bookmarkStar.spec.tsx
index b7249a15684e37..7b36debaa50247 100644
--- a/static/app/components/projects/bookmarkStar.spec.tsx
+++ b/static/app/components/projects/bookmarkStar.spec.tsx
@@ -7,22 +7,14 @@ import BookmarkStar from 'sentry/components/projects/bookmarkStar';
import ProjectsStore from 'sentry/stores/projectsStore';
describe('BookmarkStar', function () {
- const project = ProjectFixture();
-
- beforeEach(function () {
- ProjectsStore.loadInitialData([project]);
- });
-
afterEach(function () {
ProjectsStore.reset();
MockApiClient.clearMockResponses();
});
- it('renders', function () {
- render( );
- });
-
it('can star', async function () {
+ const project = ProjectFixture();
+ ProjectsStore.loadInitialData([project]);
render( );
const projectMock = MockApiClient.addMockResponse({
@@ -46,12 +38,9 @@ describe('BookmarkStar', function () {
});
it('can unstar', async function () {
- render(
-
- );
+ const project = ProjectFixture({isBookmarked: true});
+ ProjectsStore.loadInitialData([project]);
+ render( );
const projectMock = MockApiClient.addMockResponse({
url: '/projects/org-slug/project-slug/',
diff --git a/static/app/components/projects/bookmarkStar.tsx b/static/app/components/projects/bookmarkStar.tsx
index 9c606e569b4873..0d1c8bdd83f0ff 100644
--- a/static/app/components/projects/bookmarkStar.tsx
+++ b/static/app/components/projects/bookmarkStar.tsx
@@ -24,20 +24,20 @@ function BookmarkStar({className, organization, project, onToggle}: Props) {
const [isBookmarked, setIsBookmarked] = useState(project.isBookmarked);
const {mutate: handleBookmarkToggle, isPending: isBookmarking} = useMutation({
- mutationFn: () => {
+ mutationFn: (variables: {isBookmarked: boolean}) => {
return update(api, {
orgId: organization.slug,
projectId: project.slug,
- data: {isBookmarked: !isBookmarked},
+ data: {isBookmarked: variables.isBookmarked},
});
},
- onMutate: () => {
- onToggle?.(isBookmarked);
- setIsBookmarked(current => !current);
+ onMutate: variables => {
+ onToggle?.(variables.isBookmarked);
+ setIsBookmarked(variables.isBookmarked);
},
- onError: () => {
+ onError: (_data, variables) => {
addErrorMessage(t('Unable to toggle bookmark for %s', project.slug));
- setIsBookmarked(current => !current);
+ setIsBookmarked(!variables.isBookmarked);
},
});
@@ -49,7 +49,7 @@ function BookmarkStar({className, organization, project, onToggle}: Props) {
aria-label={label}
aria-pressed={isBookmarked}
busy={isBookmarking}
- onClick={() => handleBookmarkToggle()}
+ onClick={() => handleBookmarkToggle({isBookmarked: !isBookmarked})}
size="zero"
borderless
className={className}
diff --git a/static/app/components/projects/missingProjectMembership.tsx b/static/app/components/projects/missingProjectMembership.tsx
index 94912788222c8b..8110ca41bac49b 100644
--- a/static/app/components/projects/missingProjectMembership.tsx
+++ b/static/app/components/projects/missingProjectMembership.tsx
@@ -126,14 +126,14 @@ class MissingProjectMembership extends Component {
const teamAccess = [
{
label: t('Request Access'),
- options: this.getTeamsForAccess()[0].map(request => ({
+ options: this.getTeamsForAccess()[0]!.map(request => ({
value: request,
label: `#${request}`,
})),
},
{
label: t('Pending Requests'),
- options: this.getTeamsForAccess()[1].map(pending =>
+ options: this.getTeamsForAccess()[1]!.map(pending =>
this.getPendingTeamOption(pending)
),
},
diff --git a/static/app/components/quickTrace/index.spec.tsx b/static/app/components/quickTrace/index.spec.tsx
index 254aa2acefc80b..c1d94ef972cee7 100644
--- a/static/app/components/quickTrace/index.spec.tsx
+++ b/static/app/components/quickTrace/index.spec.tsx
@@ -142,7 +142,7 @@ describe('Quick Trace', function () {
/>
);
const nodes = await screen.findAllByTestId('event-node');
- expect(nodes.length).toEqual(1);
+ expect(nodes).toHaveLength(1);
expect(nodes[0]).toHaveTextContent('This Event');
});
@@ -162,7 +162,7 @@ describe('Quick Trace', function () {
/>
);
const nodes = await screen.findAllByTestId('event-node');
- expect(nodes.length).toEqual(2);
+ expect(nodes).toHaveLength(2);
['This Event', '1 Child'].forEach((text, i) =>
expect(nodes[i]).toHaveTextContent(text)
);
@@ -189,7 +189,7 @@ describe('Quick Trace', function () {
/>
);
const nodes = await screen.findAllByTestId('event-node');
- expect(nodes.length).toEqual(2);
+ expect(nodes).toHaveLength(2);
['This Event', '3 Children'].forEach((text, i) =>
expect(nodes[i]).toHaveTextContent(text)
);
@@ -211,7 +211,7 @@ describe('Quick Trace', function () {
/>
);
const nodes = await screen.findAllByTestId('event-node');
- expect(nodes.length).toEqual(2);
+ expect(nodes).toHaveLength(2);
['Parent', 'This Event'].forEach((text, i) =>
expect(nodes[i]).toHaveTextContent(text)
);
@@ -240,7 +240,7 @@ describe('Quick Trace', function () {
/>
);
const nodes = await screen.findAllByTestId('event-node');
- expect(nodes.length).toEqual(4);
+ expect(nodes).toHaveLength(4);
['Root', '1 Ancestor', 'Parent', 'This Event'].forEach((text, i) =>
expect(nodes[i]).toHaveTextContent(text)
);
@@ -274,7 +274,7 @@ describe('Quick Trace', function () {
/>
);
const nodes = await screen.findAllByTestId('event-node');
- expect(nodes.length).toEqual(4);
+ expect(nodes).toHaveLength(4);
['Root', '3 Ancestors', 'Parent', 'This Event'].forEach((text, i) =>
expect(nodes[i]).toHaveTextContent(text)
);
@@ -300,7 +300,7 @@ describe('Quick Trace', function () {
/>
);
const nodes = await screen.findAllByTestId('event-node');
- expect(nodes.length).toEqual(3);
+ expect(nodes).toHaveLength(3);
['This Event', '1 Child', '1 Descendant'].forEach((text, i) =>
expect(nodes[i]).toHaveTextContent(text)
);
@@ -333,7 +333,7 @@ describe('Quick Trace', function () {
/>
);
const nodes = await screen.findAllByTestId('event-node');
- expect(nodes.length).toEqual(3);
+ expect(nodes).toHaveLength(3);
['This Event', '1 Child', '3 Descendants'].forEach((text, i) =>
expect(nodes[i]).toHaveTextContent(text)
);
@@ -371,7 +371,7 @@ describe('Quick Trace', function () {
/>
);
const nodes = await screen.findAllByTestId('event-node');
- expect(nodes.length).toEqual(6);
+ expect(nodes).toHaveLength(6);
['Root', '3 Ancestors', 'Parent', 'This Event', '1 Child', '3 Descendants'].forEach(
(text, i) => expect(nodes[i]).toHaveTextContent(text)
);
@@ -402,7 +402,7 @@ describe('Quick Trace', function () {
/>
);
const nodes = await screen.findAllByTestId('event-node');
- expect(nodes.length).toEqual(6);
+ expect(nodes).toHaveLength(6);
[
makeTransactionHref('p0', 'e0', 't0'),
makeTransactionHref('p1', 'e1', 't1'),
@@ -411,7 +411,7 @@ describe('Quick Trace', function () {
makeTransactionHref('p4', 'e4', 't4'),
makeTransactionHref('p5', 'e5', 't5'),
].forEach((target, i) => {
- const linkNode = nodes[i].children[0];
+ const linkNode = nodes[i]!.children[0];
if (target) {
expect(linkNode).toHaveAttribute('href', target);
} else {
@@ -441,7 +441,7 @@ describe('Quick Trace', function () {
/>
);
const items = await screen.findAllByTestId('dropdown-item');
- expect(items.length).toEqual(3);
+ expect(items).toHaveLength(3);
// can't easily assert the target is correct since it uses an onClick handler
});
});
diff --git a/static/app/components/quickTrace/index.tsx b/static/app/components/quickTrace/index.tsx
index 78a914d8796ebd..03cd05ef3fd4ff 100644
--- a/static/app/components/quickTrace/index.tsx
+++ b/static/app/components/quickTrace/index.tsx
@@ -394,20 +394,20 @@ function EventNodeSelector({
const hoverText = totalErrors ? (
t('View the error for this Transaction')
) : (
-
+
);
const target = errors.length
- ? generateSingleErrorTarget(errors[0], organization, location, errorDest)
+ ? generateSingleErrorTarget(errors[0]!, organization, location, errorDest)
: perfIssues.length
- ? generateSingleErrorTarget(perfIssues[0], organization, location, errorDest)
+ ? generateSingleErrorTarget(perfIssues[0]!, organization, location, errorDest)
: generateLinkToEventInTraceView({
traceSlug,
- eventId: events[0].event_id,
- projectSlug: events[0].project_slug,
- timestamp: events[0].timestamp,
+ eventId: events[0]!.event_id,
+ projectSlug: events[0]!.project_slug,
+ timestamp: events[0]!.timestamp,
location,
organization,
- transactionName: events[0].transaction,
+ transactionName: events[0]!.transaction,
type: transactionDest,
});
return (
diff --git a/static/app/components/replays/breadcrumbs/breadcrumbItem.spec.tsx b/static/app/components/replays/breadcrumbs/breadcrumbItem.spec.tsx
index d2ab3d84f476a6..57ca7b981ebae2 100644
--- a/static/app/components/replays/breadcrumbs/breadcrumbItem.spec.tsx
+++ b/static/app/components/replays/breadcrumbs/breadcrumbItem.spec.tsx
@@ -22,12 +22,12 @@ describe('BreadcrumbItem', function () {
const mockMouseLeave = jest.fn();
render(
{}}
- startTimestampMs={MOCK_FRAME.timestampMs}
+ startTimestampMs={MOCK_FRAME!.timestampMs}
/>,
{organization}
);
diff --git a/static/app/components/replays/breadcrumbs/replayComparisonModal.tsx b/static/app/components/replays/breadcrumbs/replayComparisonModal.tsx
index 0bc960362be92e..25181bb2a8a0aa 100644
--- a/static/app/components/replays/breadcrumbs/replayComparisonModal.tsx
+++ b/static/app/components/replays/breadcrumbs/replayComparisonModal.tsx
@@ -60,19 +60,24 @@ export default function ReplayComparisonModal({
-
- {focusTrap ? (
- {
- focusTrap.pause();
- },
- onFormClose: () => {
- focusTrap.unpause();
- },
- }}
+
+ focusTrap?.pause()}
+ onBlur={() => focusTrap?.unpause()}
/>
- ) : null}
+ {focusTrap ? (
+ {
+ focusTrap.pause();
+ },
+ onFormClose: () => {
+ focusTrap.unpause();
+ },
+ }}
+ />
+ ) : null}
+
diff --git a/static/app/components/replays/breadcrumbs/replayTimelineEvents.tsx b/static/app/components/replays/breadcrumbs/replayTimelineEvents.tsx
index 0a701c42a5e0f1..eed579105eafe8 100644
--- a/static/app/components/replays/breadcrumbs/replayTimelineEvents.tsx
+++ b/static/app/components/replays/breadcrumbs/replayTimelineEvents.tsx
@@ -163,9 +163,9 @@ const getBackgroundGradient = ({
frameCount: number;
theme: Theme;
}) => {
- const c0 = theme[colors[0]] ?? colors[0];
- const c1 = theme[colors[1]] ?? colors[1] ?? c0;
- const c2 = theme[colors[2]] ?? colors[2] ?? c1;
+ const c0 = theme[colors[0]!] ?? colors[0]!;
+ const c1 = theme[colors[1]!] ?? colors[1] ?? c0;
+ const c2 = theme[colors[2]!] ?? colors[2] ?? c1;
if (frameCount === 1) {
return `background: ${c0};`;
diff --git a/static/app/components/replays/breadcrumbs/timelineGaps.tsx b/static/app/components/replays/breadcrumbs/timelineGaps.tsx
index 4118c3168fd753..37864ee118bd6a 100644
--- a/static/app/components/replays/breadcrumbs/timelineGaps.tsx
+++ b/static/app/components/replays/breadcrumbs/timelineGaps.tsx
@@ -48,7 +48,7 @@ export default function TimelineGaps({durationMs, startTimestampMs, videoEvents}
gaps: gaps.length,
max_gap: Math.max(...gaps.map(obj => parseFloat(obj.width))),
replay_duration: durationMs,
- organization: organization,
+ organization,
});
}, [durationMs, organization, gaps]);
diff --git a/static/app/components/replays/canvasReplayerPlugin.tsx b/static/app/components/replays/canvasReplayerPlugin.tsx
index 55fec93e00cdaa..a026dcdf0b1367 100644
--- a/static/app/components/replays/canvasReplayerPlugin.tsx
+++ b/static/app/components/replays/canvasReplayerPlugin.tsx
@@ -51,7 +51,7 @@ function findIndex(
const mid = Math.floor((start + end) / 2);
// Search lower half
- if (event.timestamp <= arr[mid].timestamp) {
+ if (event.timestamp <= arr[mid]!.timestamp) {
return findIndex(arr, event, start, mid - 1);
}
@@ -106,7 +106,7 @@ export function CanvasReplayerPlugin(events: eventWithTime[]): ReplayPlugin {
while (eventsToPrune.length) {
// Peek top of queue and see if event should be pruned, otherwise we can break out of the loop
if (
- Math.abs(event.timestamp - eventsToPrune[0].timestamp) <= BUFFER_TIME &&
+ Math.abs(event.timestamp - eventsToPrune[0]!.timestamp) <= BUFFER_TIME &&
eventsToPrune.length <= PRELOAD_SIZE
) {
break;
diff --git a/static/app/components/replays/configureMobileReplayCard.tsx b/static/app/components/replays/configureMobileReplayCard.tsx
new file mode 100644
index 00000000000000..052d68e1df7301
--- /dev/null
+++ b/static/app/components/replays/configureMobileReplayCard.tsx
@@ -0,0 +1,54 @@
+import {ClassNames} from '@emotion/react';
+
+import {Button} from 'sentry/components/button';
+import {Hovercard} from 'sentry/components/hovercard';
+import {ButtonContainer, Resource} from 'sentry/components/replays/configureReplayCard';
+import {IconQuestion} from 'sentry/icons';
+import {t} from 'sentry/locale';
+import {space} from 'sentry/styles/space';
+
+function ResourceButtons() {
+ return (
+
+
+
+
+
+ );
+}
+
+export default function ConfigureMobileReplayCard() {
+ return (
+
+ {({css}) => (
+ }
+ bodyClassName={css`
+ padding: ${space(1)};
+ `}
+ position="top-end"
+ >
+ }
+ aria-label={t('replay configure resources')}
+ >
+ {t('Configure Replay')}
+
+
+ )}
+
+ );
+}
diff --git a/static/app/components/replays/configureReplayCard.tsx b/static/app/components/replays/configureReplayCard.tsx
index 31fbf258a40536..818125850e5dc3 100644
--- a/static/app/components/replays/configureReplayCard.tsx
+++ b/static/app/components/replays/configureReplayCard.tsx
@@ -10,7 +10,7 @@ import {space} from 'sentry/styles/space';
import {trackAnalytics} from 'sentry/utils/analytics';
import useOrganization from 'sentry/utils/useOrganization';
-function Resource({
+export function Resource({
title,
subtitle,
link,
@@ -54,6 +54,11 @@ function ResourceButtons() {
subtitle={t('Unmask text (****) and unblock media (img, svg, video, etc.)')}
link="https://docs.sentry.io/platforms/javascript/session-replay/privacy/#privacy-configuration"
/>
+
>
+) {
return (
{({css}) => (
}
bodyClassName={css`
padding: ${space(1)};
diff --git a/static/app/components/replays/diff/replaySideBySideImageDiff.tsx b/static/app/components/replays/diff/replaySideBySideImageDiff.tsx
index 0606a645545032..15c0f1d0b88258 100644
--- a/static/app/components/replays/diff/replaySideBySideImageDiff.tsx
+++ b/static/app/components/replays/diff/replaySideBySideImageDiff.tsx
@@ -5,9 +5,9 @@ import {After, Before, DiffHeader} from 'sentry/components/replays/diff/utils';
import ReplayPlayer from 'sentry/components/replays/player/replayPlayer';
import ReplayPlayerMeasurer from 'sentry/components/replays/player/replayPlayerMeasurer';
import {space} from 'sentry/styles/space';
-import {ReplayPlayerEventsContextProvider} from 'sentry/utils/replays/playback/providers/replayPlayerEventsContext';
import {ReplayPlayerPluginsContextProvider} from 'sentry/utils/replays/playback/providers/replayPlayerPluginsContext';
import {ReplayPlayerStateContextProvider} from 'sentry/utils/replays/playback/providers/replayPlayerStateContext';
+import {ReplayReaderProvider} from 'sentry/utils/replays/playback/providers/replayReaderProvider';
import type ReplayReader from 'sentry/utils/replays/replayReader';
interface Props {
@@ -26,7 +26,7 @@ export function ReplaySideBySideImageDiff({leftOffsetMs, replay, rightOffsetMs}:
-
+
@@ -41,7 +41,7 @@ export function ReplaySideBySideImageDiff({leftOffsetMs, replay, rightOffsetMs}:
-
+
diff --git a/static/app/components/replays/diff/replaySliderDiff.tsx b/static/app/components/replays/diff/replaySliderDiff.tsx
index 0e608410db5212..6ff0c82388de58 100644
--- a/static/app/components/replays/diff/replaySliderDiff.tsx
+++ b/static/app/components/replays/diff/replaySliderDiff.tsx
@@ -8,9 +8,9 @@ import ReplayPlayerMeasurer from 'sentry/components/replays/player/replayPlayerM
import {space} from 'sentry/styles/space';
import {trackAnalytics} from 'sentry/utils/analytics';
import toPixels from 'sentry/utils/number/toPixels';
-import {ReplayPlayerEventsContextProvider} from 'sentry/utils/replays/playback/providers/replayPlayerEventsContext';
import {ReplayPlayerPluginsContextProvider} from 'sentry/utils/replays/playback/providers/replayPlayerPluginsContext';
import {ReplayPlayerStateContextProvider} from 'sentry/utils/replays/playback/providers/replayPlayerStateContext';
+import {ReplayReaderProvider} from 'sentry/utils/replays/playback/providers/replayReaderProvider';
import type ReplayReader from 'sentry/utils/replays/replayReader';
import {useDimensions} from 'sentry/utils/useDimensions';
import useOrganization from 'sentry/utils/useOrganization';
@@ -114,7 +114,7 @@ function DiffSides({
return (
-
+
@@ -137,7 +137,7 @@ function DiffSides({
-
+
diff --git a/static/app/components/replays/header/errorCounts.spec.tsx b/static/app/components/replays/header/errorCounts.spec.tsx
index afe8f7060dcb51..00a97e2765eb0d 100644
--- a/static/app/components/replays/header/errorCounts.spec.tsx
+++ b/static/app/components/replays/header/errorCounts.spec.tsx
@@ -45,13 +45,13 @@ describe('ErrorCounts', () => {
});
});
- it('should render 0 when there are no errors in the array', async () => {
+ it('should render 0 when there are no errors in the array', () => {
const errors = [];
render( , {
organization,
});
- const countNode = await screen.getByLabelText('number of errors');
+ const countNode = screen.getByLabelText('number of errors');
expect(countNode).toHaveTextContent('0');
});
@@ -62,7 +62,7 @@ describe('ErrorCounts', () => {
organization,
});
- const countNode = await screen.getByLabelText('number of errors');
+ const countNode = screen.getByLabelText('number of errors');
expect(countNode).toHaveTextContent('1');
const icon = await screen.findByTestId('platform-icon-javascript');
@@ -85,7 +85,7 @@ describe('ErrorCounts', () => {
organization,
});
- const countNodes = await screen.getAllByLabelText('number of errors');
+ const countNodes = screen.getAllByLabelText('number of errors');
expect(countNodes[0]).toHaveTextContent('1');
expect(countNodes[1]).toHaveTextContent('2');
@@ -94,11 +94,11 @@ describe('ErrorCounts', () => {
const pyIcon = await screen.findByTestId('platform-icon-python');
expect(pyIcon).toBeInTheDocument();
- expect(countNodes[0].parentElement).toHaveAttribute(
+ expect(countNodes[0]!.parentElement).toHaveAttribute(
'href',
'/mock-pathname/?f_e_project=my-js-app&t_main=errors'
);
- expect(countNodes[1].parentElement).toHaveAttribute(
+ expect(countNodes[1]!.parentElement).toHaveAttribute(
'href',
'/mock-pathname/?f_e_project=my-py-backend&t_main=errors'
);
@@ -118,7 +118,7 @@ describe('ErrorCounts', () => {
organization,
});
- const countNode = await screen.getByLabelText('total errors');
+ const countNode = screen.getByLabelText('total errors');
expect(countNode).toHaveTextContent('6');
const jsIcon = await screen.findByTestId('platform-icon-javascript');
@@ -127,7 +127,7 @@ describe('ErrorCounts', () => {
const pyIcon = await screen.findByTestId('platform-icon-python');
expect(pyIcon).toBeInTheDocument();
- const plusOne = await screen.getByLabelText('hidden projects');
+ const plusOne = screen.getByLabelText('hidden projects');
expect(plusOne).toHaveTextContent('+1');
expect(countNode.parentElement).toHaveAttribute(
diff --git a/static/app/components/replays/player/__stories__/providers.tsx b/static/app/components/replays/player/__stories__/providers.tsx
index 6df77654d230d4..acedf2efc98fbc 100644
--- a/static/app/components/replays/player/__stories__/providers.tsx
+++ b/static/app/components/replays/player/__stories__/providers.tsx
@@ -1,10 +1,10 @@
import type {ReactNode} from 'react';
import {StaticNoSkipReplayPreferences} from 'sentry/components/replays/preferences/replayPreferences';
-import {ReplayPlayerEventsContextProvider} from 'sentry/utils/replays/playback/providers/replayPlayerEventsContext';
import {ReplayPlayerPluginsContextProvider} from 'sentry/utils/replays/playback/providers/replayPlayerPluginsContext';
import {ReplayPlayerStateContextProvider} from 'sentry/utils/replays/playback/providers/replayPlayerStateContext';
import {ReplayPreferencesContextProvider} from 'sentry/utils/replays/playback/providers/replayPreferencesContext';
+import {ReplayReaderProvider} from 'sentry/utils/replays/playback/providers/replayReaderProvider';
import type ReplayReader from 'sentry/utils/replays/replayReader';
export default function Providers({
@@ -17,9 +17,9 @@ export default function Providers({
return (
-
+
{children}
-
+
);
diff --git a/static/app/components/replays/player/__stories__/replaySlugChooser.tsx b/static/app/components/replays/player/__stories__/replaySlugChooser.tsx
index 4cdb1db67ce1f8..c6b7c1709d76f7 100644
--- a/static/app/components/replays/player/__stories__/replaySlugChooser.tsx
+++ b/static/app/components/replays/player/__stories__/replaySlugChooser.tsx
@@ -2,7 +2,7 @@ import {Fragment, type ReactNode} from 'react';
import {css} from '@emotion/react';
import Providers from 'sentry/components/replays/player/__stories__/providers';
-import useReplayReader from 'sentry/utils/replays/hooks/useReplayReader';
+import useLoadReplayReader from 'sentry/utils/replays/hooks/useLoadReplayReader';
import useOrganization from 'sentry/utils/useOrganization';
import {useSessionStorage} from 'sentry/utils/useSessionStorage';
@@ -29,7 +29,7 @@ export default function ReplaySlugChooser({children}: {children: ReactNode}) {
function LoadReplay({children, replaySlug}: {children: ReactNode; replaySlug: string}) {
const organization = useOrganization();
- const {fetchError, fetching, replay} = useReplayReader({
+ const {fetchError, fetching, replay} = useLoadReplayReader({
orgSlug: organization.slug,
replaySlug,
});
diff --git a/static/app/components/replays/player/replayPlayer.tsx b/static/app/components/replays/player/replayPlayer.tsx
index a31ea43c52f763..ea63980418475d 100644
--- a/static/app/components/replays/player/replayPlayer.tsx
+++ b/static/app/components/replays/player/replayPlayer.tsx
@@ -6,13 +6,13 @@ import {
baseReplayerCss,
sentryReplayerCss,
} from 'sentry/components/replays/player/styles';
-import {useReplayPlayerEvents} from 'sentry/utils/replays/playback/providers/replayPlayerEventsContext';
import {useReplayPlayerPlugins} from 'sentry/utils/replays/playback/providers/replayPlayerPluginsContext';
import {
useReplayPlayerStateDispatch,
useReplayUserAction,
} from 'sentry/utils/replays/playback/providers/replayPlayerStateContext';
import {useReplayPrefs} from 'sentry/utils/replays/playback/providers/replayPreferencesContext';
+import {useReplayReader} from 'sentry/utils/replays/playback/providers/replayReaderProvider';
function useReplayerInstance() {
// The div that is emitted from react, where we will attach the replayer to
@@ -26,7 +26,7 @@ function useReplayerInstance() {
const [prefs] = useReplayPrefs();
const initialPrefsRef = useRef(prefs); // don't re-mount the player when prefs change, instead there's a useEffect
const getPlugins = useReplayPlayerPlugins();
- const events = useReplayPlayerEvents();
+ const replay = useReplayReader();
// Hooks to sync this Replayer state up and out of this component
const dispatch = useReplayPlayerStateDispatch();
@@ -39,7 +39,9 @@ function useReplayerInstance() {
return () => {};
}
- const replayer = new Replayer(events, {
+ const webFrames = replay.getRRWebFrames();
+
+ const replayer = new Replayer(webFrames, {
root,
blockClass: 'sentry-block',
mouseTail: {
@@ -48,7 +50,7 @@ function useReplayerInstance() {
lineWidth: 2,
strokeStyle: theme.purple200,
},
- plugins: getPlugins(events),
+ plugins: getPlugins(webFrames),
skipInactive: initialPrefsRef.current.isSkippingInactive,
speed: initialPrefsRef.current.playbackSpeed,
});
@@ -56,7 +58,7 @@ function useReplayerInstance() {
replayerRef.current = replayer;
dispatch({type: 'didMountPlayer', replayer, dispatch});
return () => dispatch({type: 'didUnmountPlayer', replayer});
- }, [dispatch, events, getPlugins, theme]);
+ }, [dispatch, getPlugins, replay, theme]);
useEffect(() => {
if (!replayerRef.current) {
diff --git a/static/app/components/replays/player/styles.tsx b/static/app/components/replays/player/styles.tsx
index 02a32c09c7ffa0..29c64ccf695428 100644
--- a/static/app/components/replays/player/styles.tsx
+++ b/static/app/components/replays/player/styles.tsx
@@ -19,6 +19,9 @@ export const baseReplayerCss = css`
border: none;
background: white;
}
+ .video-replayer-wrapper + .replayer-wrapper > iframe {
+ opacity: 0;
+ }
&[data-inspectable='true'] .replayer-wrapper > iframe {
/* Set pointer-events to make it easier to right-click & inspect */
@@ -111,18 +114,4 @@ export const sentryReplayerCss = (theme: Theme) => css`
height: 10px;
}
}
-
- /* Correctly positions the canvas for video replays and shows the purple "mousetails" */
- &.video-replayer {
- .replayer-wrapper {
- position: absolute;
- top: 0;
- left: 0;
- width: 100%;
- height: 100%;
- }
- .replayer-wrapper > iframe {
- opacity: 0;
- }
- }
`;
diff --git a/static/app/components/replays/replayContext.tsx b/static/app/components/replays/replayContext.tsx
index 25b64b29411833..e86b232fc55a4d 100644
--- a/static/app/components/replays/replayContext.tsx
+++ b/static/app/components/replays/replayContext.tsx
@@ -7,6 +7,7 @@ import {VideoReplayerWithInteractions} from 'sentry/components/replays/videoRepl
import {trackAnalytics} from 'sentry/utils/analytics';
import clamp from 'sentry/utils/number/clamp';
import type useInitialOffsetMs from 'sentry/utils/replays/hooks/useInitialTimeOffsetMs';
+import useTouchEventsCheck from 'sentry/utils/replays/playback/hooks/useTouchEventsCheck';
import {useReplayPrefs} from 'sentry/utils/replays/playback/providers/replayPreferencesContext';
import {ReplayCurrentTimeContextProvider} from 'sentry/utils/replays/playback/providers/useCurrentHoverTime';
import type ReplayReader from 'sentry/utils/replays/replayReader';
@@ -340,7 +341,6 @@ export function Provider({
}
}
- // eslint-disable-next-line no-new
const inst = new Replayer(events, {
root,
blockClass: 'sentry-block',
@@ -389,6 +389,8 @@ export function Provider({
]
);
+ useTouchEventsCheck({replay: isFetching ? null : replay});
+
const initVideoRoot = useCallback(
(root: RootElem) => {
if (root === null || isFetching) {
@@ -429,13 +431,8 @@ export function Provider({
// rrweb specific
theme,
eventsWithSnapshots: replay?.getRRWebFramesWithSnapshots() ?? [],
- touchEvents: replay?.getRRwebTouchEvents() ?? [],
// common to both
root,
- context: {
- sdkName: replay?.getReplay().sdk.name,
- sdkVersion: replay?.getReplay().sdk.version,
- },
});
// `.current` is marked as readonly, but it's safe to set the value from
// inside a `useEffect` hook.
diff --git a/static/app/components/replays/replayPlayer.tsx b/static/app/components/replays/replayPlayer.tsx
index 9b3160f52b3f58..408b93060583e9 100644
--- a/static/app/components/replays/replayPlayer.tsx
+++ b/static/app/components/replays/replayPlayer.tsx
@@ -217,6 +217,14 @@ const SentryPlayerRoot = styled(BasePlayerRoot)`
${baseReplayerCss}
/* Sentry-specific styles for the player */
${p => sentryReplayerCss(p.theme)}
+
+ .video-replayer-wrapper + .replayer-wrapper {
+ position: absolute;
+ top: 0;
+ left: 0;
+ width: 100%;
+ height: 100%;
+ }
`;
const Overlay = styled('div')`
diff --git a/static/app/components/replays/replayTagsTableRow.tsx b/static/app/components/replays/replayTagsTableRow.tsx
index 70f22a29057a16..0d05ad66674a5a 100644
--- a/static/app/components/replays/replayTagsTableRow.tsx
+++ b/static/app/components/replays/replayTagsTableRow.tsx
@@ -6,8 +6,10 @@ import type {LocationDescriptor} from 'history';
import {AnnotatedText} from 'sentry/components/events/meta/annotatedText';
import {KeyValueTableRow} from 'sentry/components/keyValueTable';
import Link from 'sentry/components/links/link';
+import {CollapsibleValue} from 'sentry/components/structuredEventData/collapsibleValue';
import {Tooltip} from 'sentry/components/tooltip';
import Version from 'sentry/components/version';
+import {space} from 'sentry/styles/space';
interface Props {
name: string;
@@ -15,6 +17,29 @@ interface Props {
generateUrl?: (name: string, value: ReactNode) => LocationDescriptor;
}
+const expandedViewKeys = [
+ 'sdk.replay.maskedViewClasses',
+ 'sdk.replay.unmaskedViewClasses',
+];
+
+function renderValueList(values: ReactNode[]) {
+ if (typeof values[0] === 'string') {
+ return values[0];
+ }
+ const valueItems = values[0] as Array;
+
+ if (!valueItems.length) {
+ return undefined;
+ }
+
+ return valueItems.map((value, index) => (
+
+ {value}
+
+
+ ));
+}
+
function ReplayTagsTableRow({name, values, generateUrl}: Props) {
const renderTagValue = useMemo(() => {
if (name === 'release') {
@@ -25,6 +50,17 @@ function ReplayTagsTableRow({name, values, generateUrl}: Props) {
));
}
+ if (
+ expandedViewKeys.includes(name) &&
+ renderValueList(values) &&
+ typeof renderValueList(values) !== 'string'
+ ) {
+ return (
+
+ {renderValueList(values)}
+
+ );
+ }
return values.map((value, index) => {
const target = generateUrl?.(name, value);
@@ -46,9 +82,20 @@ function ReplayTagsTableRow({name, values, generateUrl}: Props) {
}
value={
-
- {renderTagValue}
-
+
+
+ {renderTagValue}
+
+
}
/>
);
@@ -56,6 +103,15 @@ function ReplayTagsTableRow({name, values, generateUrl}: Props) {
export default ReplayTagsTableRow;
+const ValueContainer = styled('div')`
+ span {
+ font-size: ${p => p.theme.fontSizeMedium};
+ }
+ display: flex;
+ padding: ${space(0.25)};
+ justify-content: flex-end;
+`;
+
const StyledTooltip = styled(Tooltip)`
${p => p.theme.overflowEllipsis};
`;
diff --git a/static/app/components/replays/useQueryBasedColumnResize.tsx b/static/app/components/replays/useQueryBasedColumnResize.tsx
index f3a56e445fa340..a23c613a2d57b7 100644
--- a/static/app/components/replays/useQueryBasedColumnResize.tsx
+++ b/static/app/components/replays/useQueryBasedColumnResize.tsx
@@ -4,8 +4,8 @@ import dropRightWhile from 'lodash/dropRightWhile';
import type {GridColumnOrder} from 'sentry/components/gridEditable';
import {COL_WIDTH_UNDEFINED} from 'sentry/components/gridEditable';
-import {browserHistory} from 'sentry/utils/browserHistory';
import {decodeInteger, decodeList} from 'sentry/utils/queryString';
+import {useNavigate} from 'sentry/utils/useNavigate';
interface Props {
columns: GridColumnOrder[];
@@ -19,6 +19,7 @@ export default function useQueryBasedColumnResize({
paramName = 'width',
}: Props) {
const queryParam = location.query[paramName];
+ const navigate = useNavigate();
const columnsWidthWidths = useMemo(() => {
const widths = decodeList(queryParam);
@@ -34,15 +35,18 @@ export default function useQueryBasedColumnResize({
(column, i) =>
(i === columnIndex ? resizedColumn.width : column.width) ?? COL_WIDTH_UNDEFINED
);
- browserHistory.replace({
- pathname: location.pathname,
- query: {
- ...location.query,
- [paramName]: dropRightWhile(widths, width => width === COL_WIDTH_UNDEFINED),
+ navigate(
+ {
+ pathname: location.pathname,
+ query: {
+ ...location.query,
+ [paramName]: dropRightWhile(widths, width => width === COL_WIDTH_UNDEFINED),
+ },
},
- });
+ {replace: true}
+ );
},
- [columns, location.pathname, location.query, paramName]
+ [columns, location.pathname, location.query, paramName, navigate]
);
return {
diff --git a/static/app/components/replays/utils.spec.tsx b/static/app/components/replays/utils.spec.tsx
index 7e9928dfb91948..0ce003ea1cddcb 100644
--- a/static/app/components/replays/utils.spec.tsx
+++ b/static/app/components/replays/utils.spec.tsx
@@ -110,11 +110,11 @@ describe('getFramesByColumn', () => {
it('should put a crumbs in the first and last buckets', () => {
const columnCount = 3;
- const columns = getFramesByColumn(durationMs, [CRUMB_1, CRUMB_5], columnCount);
+ const columns = getFramesByColumn(durationMs, [CRUMB_1!, CRUMB_5!], columnCount);
expect(columns).toEqual(
new Map([
- [1, [CRUMB_1]],
- [3, [CRUMB_5]],
+ [1, [CRUMB_1!]],
+ [3, [CRUMB_5!]],
])
);
});
@@ -124,7 +124,7 @@ describe('getFramesByColumn', () => {
const columnCount = 6;
const columns = getFramesByColumn(
durationMs,
- [CRUMB_1, CRUMB_2, CRUMB_3, CRUMB_4, CRUMB_5],
+ [CRUMB_1!, CRUMB_2!, CRUMB_3!, CRUMB_4!, CRUMB_5!],
columnCount
);
expect(columns).toEqual(
@@ -322,7 +322,7 @@ describe('findVideoSegmentIndex', () => {
const trackList2 = segments2.map(
({timestamp}, index) => [timestamp, index] as [ts: number, index: number]
);
- expect(findVideoSegmentIndex(trackList2, segments2, 1000)).toEqual(-1);
+ expect(findVideoSegmentIndex(trackList2, segments2, 1000)).toBe(-1);
});
it('returns first segment if target timestamp is before the first segment', () => {
@@ -351,6 +351,6 @@ describe('findVideoSegmentIndex', () => {
const trackList2 = segments2.map(
({timestamp}, index) => [timestamp, index] as [ts: number, index: number]
);
- expect(findVideoSegmentIndex(trackList2, segments2, 1000)).toEqual(-1);
+ expect(findVideoSegmentIndex(trackList2, segments2, 1000)).toBe(-1);
});
});
diff --git a/static/app/components/replays/utils.tsx b/static/app/components/replays/utils.tsx
index a750ed5b279590..5c1ee7851f79d9 100644
--- a/static/app/components/replays/utils.tsx
+++ b/static/app/components/replays/utils.tsx
@@ -134,17 +134,17 @@ export function flattenFrames(frames: SpanFrame[]): FlattenedSpanRange[] {
};
});
- const flattened = [first];
+ const flattened = [first!];
for (const span of rest) {
let overlap = false;
for (const range of flattened) {
- if (doesOverlap(range, span)) {
+ if (doesOverlap(range!, span)) {
overlap = true;
- range.frameCount += 1;
- range.startTimestamp = Math.min(range.startTimestamp, span.startTimestamp);
- range.endTimestamp = Math.max(range.endTimestamp, span.endTimestamp);
- range.duration = range.endTimestamp - range.startTimestamp;
+ range!.frameCount += 1;
+ range!.startTimestamp = Math.min(range!.startTimestamp, span.startTimestamp);
+ range!.endTimestamp = Math.max(range!.endTimestamp, span.endTimestamp);
+ range!.duration = range!.endTimestamp - range!.startTimestamp;
break;
}
}
@@ -178,11 +178,11 @@ export function findVideoSegmentIndex(
const mid = Math.floor((start + end) / 2);
- const [ts, index] = trackList[mid];
+ const [ts, index] = trackList[mid]!;
const segment = segments[index];
// Segment match found
- if (targetTimestamp >= ts && targetTimestamp <= ts + segment.duration) {
+ if (targetTimestamp >= ts && targetTimestamp <= ts + segment!.duration) {
return index;
}
diff --git a/static/app/components/replays/videoReplayer.spec.tsx b/static/app/components/replays/videoReplayer.spec.tsx
index 88c56381b14f19..23904e59284982 100644
--- a/static/app/components/replays/videoReplayer.spec.tsx
+++ b/static/app/components/replays/videoReplayer.spec.tsx
@@ -91,7 +91,7 @@ describe('VideoReplayer - no starting gap', () => {
config: {skipInactive: false, speed: 1.0},
});
// @ts-expect-error private
- expect(inst._currentIndex).toEqual(0);
+ expect(inst._currentIndex).toBe(0);
const playPromise = inst.play(6500);
jest.advanceTimersByTime(10000);
@@ -99,10 +99,10 @@ describe('VideoReplayer - no starting gap', () => {
await playPromise;
// @ts-expect-error private
- expect(inst._currentIndex).toEqual(1);
+ expect(inst._currentIndex).toBe(1);
// `currentTime` is in seconds
// @ts-expect-error private
- expect(inst.getVideo(inst._currentIndex)?.currentTime).toEqual(1.5);
+ expect(inst.getVideo(inst._currentIndex)?.currentTime).toBe(1.5);
});
it('seeks to a gap in a video', async () => {
@@ -127,10 +127,10 @@ describe('VideoReplayer - no starting gap', () => {
jest.advanceTimersByTime(2500);
await playPromise;
// @ts-expect-error private
- expect(inst._currentIndex).toEqual(3);
+ expect(inst._currentIndex).toBe(3);
// `currentTime` is in seconds
// @ts-expect-error private
- expect(inst.getVideo(inst._currentIndex)?.currentTime).toEqual(0);
+ expect(inst.getVideo(inst._currentIndex)?.currentTime).toBe(0);
});
it('seeks past end of the replay', async () => {
@@ -153,10 +153,10 @@ describe('VideoReplayer - no starting gap', () => {
jest.advanceTimersByTime(5000);
await playPromise;
// @ts-expect-error private
- expect(inst._currentIndex).toEqual(5);
+ expect(inst._currentIndex).toBe(5);
// `currentTime` is in seconds
// @ts-expect-error private
- expect(inst.getVideo(inst._currentIndex)?.currentTime).toEqual(5);
+ expect(inst.getVideo(inst._currentIndex)?.currentTime).toBe(5);
});
it('initially only loads videos from 0 to BUFFER', async () => {
@@ -175,9 +175,9 @@ describe('VideoReplayer - no starting gap', () => {
jest.advanceTimersByTime(2500);
await playPromise;
// @ts-expect-error private
- expect(inst._currentIndex).toEqual(0);
+ expect(inst._currentIndex).toBe(0);
// @ts-expect-error private
- expect(inst._videos.size).toEqual(3);
+ expect(inst._videos.size).toBe(3);
});
it('should load the correct videos after playing at a timestamp', async () => {
@@ -197,7 +197,7 @@ describe('VideoReplayer - no starting gap', () => {
jest.advanceTimersByTime(2500);
await playPromise;
// @ts-expect-error private
- expect(inst._currentIndex).toEqual(7);
+ expect(inst._currentIndex).toBe(7);
// videos loaded should be [0, 1, 2, 4, 5, 6, 7]
// since we have [0, 1, 2] preloaded initially
@@ -208,10 +208,10 @@ describe('VideoReplayer - no starting gap', () => {
// @ts-expect-error private
const getVideo = index => inst.getVideo(index);
- expect(videos.size).toEqual(7);
+ expect(videos.size).toBe(7);
expect(videos.get(0)).toEqual(getVideo(0));
expect(videos.get(2)).toEqual(getVideo(2));
- expect(videos.get(3)).toEqual(undefined);
+ expect(videos.get(3)).toBeUndefined();
expect(videos.get(4)).toEqual(getVideo(4));
expect(videos.get(7)).toEqual(getVideo(7));
});
@@ -233,7 +233,7 @@ describe('VideoReplayer - no starting gap', () => {
jest.advanceTimersByTime(2500);
await playPromise;
// @ts-expect-error private
- expect(inst._currentIndex).toEqual(6);
+ expect(inst._currentIndex).toBe(6);
// @ts-expect-error private
const videos = inst._videos;
@@ -241,7 +241,7 @@ describe('VideoReplayer - no starting gap', () => {
const getVideo = index => inst.getVideo(index);
// videos loaded should be [0, 1, 2, 3, 4, 5, 7, 8]
- expect(videos.size).toEqual(8);
+ expect(videos.size).toBe(8);
expect(videos.get(0)).toEqual(getVideo(0));
expect(videos.get(2)).toEqual(getVideo(2));
expect(videos.get(5)).toEqual(getVideo(5));
@@ -304,16 +304,16 @@ describe('VideoReplayer - with starting gap', () => {
config: {skipInactive: false, speed: 1.0},
});
// @ts-expect-error private
- expect(inst._currentIndex).toEqual(0);
+ expect(inst._currentIndex).toBe(0);
const playPromise = inst.play(1500);
jest.advanceTimersByTime(2000);
await playPromise;
// @ts-expect-error private
- expect(inst._currentIndex).toEqual(0);
+ expect(inst._currentIndex).toBe(0);
// `currentTime` is in seconds
// @ts-expect-error private
- expect(inst.getVideo(inst._currentIndex)?.currentTime).toEqual(0);
+ expect(inst.getVideo(inst._currentIndex)?.currentTime).toBe(0);
});
it('seeks to a gap in a video', async () => {
@@ -338,10 +338,10 @@ describe('VideoReplayer - with starting gap', () => {
jest.advanceTimersByTime(2500);
await playPromise;
// @ts-expect-error private
- expect(inst._currentIndex).toEqual(3);
+ expect(inst._currentIndex).toBe(3);
// `currentTime` is in seconds
// @ts-expect-error private
- expect(inst.getVideo(inst._currentIndex)?.currentTime).toEqual(0);
+ expect(inst.getVideo(inst._currentIndex)?.currentTime).toBe(0);
});
it('seeks past end of the replay', async () => {
@@ -364,10 +364,10 @@ describe('VideoReplayer - with starting gap', () => {
jest.advanceTimersByTime(5000);
await playPromise;
// @ts-expect-error private
- expect(inst._currentIndex).toEqual(5);
+ expect(inst._currentIndex).toBe(5);
// `currentTime` is in seconds
// @ts-expect-error private
- expect(inst.getVideo(inst._currentIndex)?.currentTime).toEqual(5);
+ expect(inst.getVideo(inst._currentIndex)?.currentTime).toBe(5);
});
});
@@ -434,21 +434,21 @@ describe('VideoReplayer - with ending gap', () => {
// we're still within the last segment (5)
// @ts-expect-error private
- expect(inst._currentIndex).toEqual(5);
- expect(inst.getCurrentTime()).toEqual(40000);
+ expect(inst._currentIndex).toBe(5);
+ expect(inst.getCurrentTime()).toBe(40000);
// now we are in the gap
// timer should still be going since the duration is 50s
jest.advanceTimersByTime(5000);
// @ts-expect-error private
- expect(inst._isPlaying).toEqual(true);
+ expect(inst._isPlaying).toBe(true);
// a long time passes
// ensure the timer stops at the end duration (50s)
jest.advanceTimersByTime(60000);
- expect(inst.getCurrentTime()).toEqual(50000);
+ expect(inst.getCurrentTime()).toBe(50000);
// @ts-expect-error private
- expect(inst._isPlaying).toEqual(false);
+ expect(inst._isPlaying).toBe(false);
});
it('ends at the proper time if seeking into a gap at the end', async () => {
@@ -472,15 +472,15 @@ describe('VideoReplayer - with ending gap', () => {
jest.advanceTimersByTime(4000);
// we should be still playing in the gap
- expect(inst.getCurrentTime()).toEqual(44002);
+ expect(inst.getCurrentTime()).toBe(44002);
// @ts-expect-error private
- expect(inst._isPlaying).toEqual(true);
+ expect(inst._isPlaying).toBe(true);
// a long time passes
// ensure the timer stops at the end duration (50s)
jest.advanceTimersByTime(60000);
expect(inst.getCurrentTime()).toBeLessThan(50100);
// @ts-expect-error private
- expect(inst._isPlaying).toEqual(false);
+ expect(inst._isPlaying).toBe(false);
});
});
diff --git a/static/app/components/replays/videoReplayer.tsx b/static/app/components/replays/videoReplayer.tsx
index b2894d200d7258..b16ceb6444ef40 100644
--- a/static/app/components/replays/videoReplayer.tsx
+++ b/static/app/components/replays/videoReplayer.tsx
@@ -3,8 +3,6 @@ import type {ClipWindow, VideoEvent} from 'sentry/utils/replays/types';
import {findVideoSegmentIndex} from './utils';
-type RootElem = HTMLDivElement | null;
-
// The number of segments to load on either side of the requested segment (around 15 seconds)
// Also the number of segments we load initially
const PRELOAD_BUFFER = 3;
@@ -19,7 +17,7 @@ interface VideoReplayerOptions {
onBuffer: (isBuffering: boolean) => void;
onFinished: () => void;
onLoaded: (event: any) => void;
- root: RootElem;
+ root: HTMLDivElement;
start: number;
videoApiPrefix: string;
clipWindow?: ClipWindow;
@@ -92,9 +90,8 @@ export class VideoReplayer {
this.config = config;
this.wrapper = document.createElement('div');
- if (root) {
- root.appendChild(this.wrapper);
- }
+ this.wrapper.className = 'video-replayer-wrapper';
+ root.appendChild(this.wrapper);
this._trackList = this._attachments.map(({timestamp}, i) => [timestamp, i]);
@@ -144,7 +141,7 @@ export class VideoReplayer {
const handleLoadedData = event => {
// Used to correctly set the dimensions of the first frame
if (index === 0) {
- this._callbacks.onLoaded(event);
+ this._callbacks.onLoaded!(event);
}
// Only call this for current segment as we preload multiple
@@ -162,7 +159,7 @@ export class VideoReplayer {
const handlePlay = event => {
if (index === this._currentIndex) {
- this._callbacks.onLoaded(event);
+ this._callbacks.onLoaded!(event);
}
};
@@ -171,13 +168,13 @@ export class VideoReplayer {
if (index === this._currentIndex) {
// Theoretically we could have different orientations and they should
// only happen in different segments
- this._callbacks.onLoaded(event);
+ this._callbacks.onLoaded!(event);
}
};
const handleSeeking = event => {
// Centers the video when seeking (and video is not playing)
- this._callbacks.onLoaded(event);
+ this._callbacks.onLoaded!(event);
};
el.addEventListener('ended', handleEnded);
@@ -287,12 +284,12 @@ export class VideoReplayer {
this.resumeTimer();
}
- this._callbacks.onBuffer(isBuffering);
+ this._callbacks.onBuffer!(isBuffering);
}
private stopReplay() {
this._timer.stop();
- this._callbacks.onFinished();
+ this._callbacks.onFinished!();
this._isPlaying = false;
}
diff --git a/static/app/components/replays/videoReplayerWithInteractions.tsx b/static/app/components/replays/videoReplayerWithInteractions.tsx
index ebccbacb409ff7..58bb814a63701c 100644
--- a/static/app/components/replays/videoReplayerWithInteractions.tsx
+++ b/static/app/components/replays/videoReplayerWithInteractions.tsx
@@ -1,25 +1,20 @@
import type {Theme} from '@emotion/react';
-import * as Sentry from '@sentry/react';
import {Replayer} from '@sentry-internal/rrweb';
import type {VideoReplayerConfig} from 'sentry/components/replays/videoReplayer';
import {VideoReplayer} from 'sentry/components/replays/videoReplayer';
import type {ClipWindow, RecordingFrame, VideoEvent} from 'sentry/utils/replays/types';
-type RootElem = HTMLDivElement | null;
-
interface VideoReplayerWithInteractionsOptions {
- context: {sdkName: string | undefined | null; sdkVersion: string | undefined | null};
durationMs: number;
eventsWithSnapshots: RecordingFrame[];
onBuffer: (isBuffering: boolean) => void;
onFinished: () => void;
onLoaded: (event: any) => void;
- root: RootElem;
+ root: HTMLDivElement;
speed: number;
start: number;
theme: Theme;
- touchEvents: RecordingFrame[];
videoApiPrefix: string;
videoEvents: VideoEvent[];
clipWindow?: ClipWindow;
@@ -37,7 +32,6 @@ export class VideoReplayerWithInteractions {
constructor({
videoEvents,
eventsWithSnapshots,
- touchEvents,
root,
start,
videoApiPrefix,
@@ -48,7 +42,6 @@ export class VideoReplayerWithInteractions {
durationMs,
theme,
speed,
- context,
}: VideoReplayerWithInteractionsOptions) {
this.config = {
skipInactive: false,
@@ -67,26 +60,8 @@ export class VideoReplayerWithInteractions {
config: this.config,
});
- root?.classList.add('video-replayer');
-
- const grouped = Object.groupBy(touchEvents, (t: any) => t.data.pointerId);
- Object.values(grouped).forEach(t => {
- if (t?.length !== 2) {
- Sentry.captureMessage(
- 'Mobile replay has mismatching touch start and end events',
- {
- tags: {
- sdk_name: context.sdkName,
- sdk_version: context.sdkVersion,
- touch_event_type: typeof t,
- },
- }
- );
- }
- });
-
this.replayer = new Replayer(eventsWithSnapshots, {
- root: root as Element,
+ root,
blockClass: 'sentry-block',
mouseTail: {
duration: 0.75 * 1000,
diff --git a/static/app/components/replaysOnboarding/platformOptionDropdown.tsx b/static/app/components/replaysOnboarding/platformOptionDropdown.tsx
index 780a9fc75f8924..e735456aa1acdb 100644
--- a/static/app/components/replaysOnboarding/platformOptionDropdown.tsx
+++ b/static/app/components/replaysOnboarding/platformOptionDropdown.tsx
@@ -41,7 +41,7 @@ function OptionControl({option, value, onChange, disabled}: OptionControlProps)
return (
v.value === value)?.label ?? option.items[0].label
+ option.items.find(v => v.value === value)?.label ?? option.items[0]!.label
}
value={value}
onChange={onChange}
@@ -81,7 +81,7 @@ export function PlatformOptionDropdown({
handleChange('siblingOption', v.value)}
disabled={disabled}
/>
diff --git a/static/app/components/replaysOnboarding/sidebar.tsx b/static/app/components/replaysOnboarding/sidebar.tsx
index f2c6054f997d19..e40f0a2e5b30af 100644
--- a/static/app/components/replaysOnboarding/sidebar.tsx
+++ b/static/app/components/replaysOnboarding/sidebar.tsx
@@ -181,7 +181,7 @@ function OnboardingContent({
value: PlatformKey;
label?: ReactNode;
textValue?: string;
- }>(jsFrameworkSelectOptions[0]);
+ }>(jsFrameworkSelectOptions[0]!);
const backendPlatform =
currentProject.platform && replayBackendPlatforms.includes(currentProject.platform);
@@ -215,7 +215,7 @@ function OnboardingContent({
platform:
showJsFrameworkInstructions && setupMode() === 'npm'
? replayJsFrameworkOptions().find(p => p.id === jsFramework.value) ??
- replayJsFrameworkOptions()[0]
+ replayJsFrameworkOptions()[0]!
: currentPlatform,
projSlug: currentProject.slug,
orgSlug: organization.slug,
@@ -226,7 +226,7 @@ function OnboardingContent({
const {docs: jsFrameworkDocs} = useLoadGettingStarted({
platform:
replayJsFrameworkOptions().find(p => p.id === jsFramework.value) ??
- replayJsFrameworkOptions()[0],
+ replayJsFrameworkOptions()[0]!,
projSlug: currentProject.slug,
orgSlug: organization.slug,
productType: 'replay',
diff --git a/static/app/components/replaysOnboarding/utils.tsx b/static/app/components/replaysOnboarding/utils.tsx
index 96273c99b035ec..1f057421d3b950 100644
--- a/static/app/components/replaysOnboarding/utils.tsx
+++ b/static/app/components/replaysOnboarding/utils.tsx
@@ -7,7 +7,7 @@ export function replayJsFrameworkOptions(): PlatformIntegration[] {
// at the front so that it shows up by default in the onboarding.
const frameworks = platforms.filter(p => replayFrontendPlatforms.includes(p.id));
const jsPlatformIdx = frameworks.findIndex(p => p.id === 'javascript');
- const jsPlatform = frameworks[jsPlatformIdx];
+ const jsPlatform = frameworks[jsPlatformIdx]!;
// move javascript to the front
frameworks.splice(jsPlatformIdx, 1);
diff --git a/static/app/components/repositoryEditForm.tsx b/static/app/components/repositoryEditForm.tsx
deleted file mode 100644
index a7b869e704c820..00000000000000
--- a/static/app/components/repositoryEditForm.tsx
+++ /dev/null
@@ -1,80 +0,0 @@
-import {Alert} from 'sentry/components/alert';
-import FieldFromConfig from 'sentry/components/forms/fieldFromConfig';
-import type {FormProps} from 'sentry/components/forms/form';
-import Form from 'sentry/components/forms/form';
-import type {Field} from 'sentry/components/forms/types';
-import ExternalLink from 'sentry/components/links/externalLink';
-import {t, tct} from 'sentry/locale';
-import type {Repository} from 'sentry/types/integrations';
-
-type Props = Pick & {
- closeModal: () => void;
- onSubmitSuccess: (data: any) => void;
- orgSlug: string;
- repository: Repository;
-};
-
-const formFields: Field[] = [
- {
- name: 'name',
- type: 'string',
- required: true,
- label: t('Name of your repository.'),
- },
- {
- name: 'url',
- type: 'string',
- required: false,
- label: t('Full URL to your repository.'),
- placeholder: t('https://github.com/my-org/my-repo/'),
- },
-];
-
-function RepositoryEditForm({
- repository,
- onCancel,
- orgSlug,
- onSubmitSuccess,
- closeModal,
-}: Props) {
- const initialData = {
- name: repository.name,
- url: repository.url || '',
- };
-
- return (
-
- );
-}
-
-export default RepositoryEditForm;
diff --git a/static/app/components/reprocessedBox.tsx b/static/app/components/reprocessedBox.tsx
index e6ae46d7fe992e..e96090f01b43a8 100644
--- a/static/app/components/reprocessedBox.tsx
+++ b/static/app/components/reprocessedBox.tsx
@@ -1,4 +1,4 @@
-import {Component} from 'react';
+import {useState} from 'react';
import styled from '@emotion/styled';
import {BannerContainer, BannerSummary} from 'sentry/components/events/styles';
@@ -18,29 +18,29 @@ type Props = {
className?: string;
};
-type State = {
- isBannerHidden: boolean;
-};
-
-class ReprocessedBox extends Component {
- state: State = {
- isBannerHidden: localStorage.getItem(this.getBannerUniqueId()) === 'true',
- };
-
- getBannerUniqueId() {
- const {reprocessActivity} = this.props;
+function ReprocessedBox({
+ orgSlug,
+ reprocessActivity,
+ groupCount,
+ className,
+ groupId,
+}: Props) {
+ const getBannerUniqueId = () => {
const {id} = reprocessActivity;
return `reprocessed-activity-${id}-banner-dismissed`;
- }
+ };
+
+ const [isBannerHidden, setIsBannerHidden] = useState(
+ localStorage.getItem(getBannerUniqueId()) === 'true'
+ );
- handleBannerDismiss = () => {
- localStorage.setItem(this.getBannerUniqueId(), 'true');
- this.setState({isBannerHidden: true});
+ const handleBannerDismiss = () => {
+ localStorage.setItem(getBannerUniqueId(), 'true');
+ setIsBannerHidden(true);
};
- renderMessage() {
- const {orgSlug, reprocessActivity, groupCount, groupId} = this.props;
+ const renderMessage = () => {
const {data} = reprocessActivity;
const {eventCount, oldGroupId, newGroupId} = data;
@@ -65,32 +65,26 @@ class ReprocessedBox extends Component {
),
});
- }
-
- render() {
- const {isBannerHidden} = this.state;
-
- if (isBannerHidden) {
- return null;
- }
-
- const {className} = this.props;
+ };
- return (
-
-
-
- {this.renderMessage()}
-
-
-
- );
+ if (isBannerHidden) {
+ return null;
}
+
+ return (
+
+
+
+ {renderMessage()}
+
+
+
+ );
}
export default ReprocessedBox;
diff --git a/static/app/components/resultGrid.tsx b/static/app/components/resultGrid.tsx
index e14af9c402250d..d4376e5e5ac85c 100644
--- a/static/app/components/resultGrid.tsx
+++ b/static/app/components/resultGrid.tsx
@@ -204,7 +204,7 @@ class ResultGrid extends Component {
this.setState(
{
query: queryParams.query ?? '',
- sortBy: queryParams.sortBy ?? this.props.defaultSort,
+ sortBy: queryParams.sortBy ?? this.props.defaultSort!,
filters: {...queryParams},
pageLinks: null,
loading: true,
@@ -358,7 +358,7 @@ class ResultGrid extends Component {
, 'onChange' | 'value'> & {
function RoleSelectControl({roles, disableUnallowed, ...props}: Props) {
const organization = useOrganization();
const isMemberInvite =
- organization.features.includes('members-invite-teammates') &&
- organization.allowMemberInvite &&
- organization.access?.includes('member:invite');
+ organization.allowMemberInvite && organization.access?.includes('member:invite');
return (
{
// Still have 4 results, but is re-ordered
expect(mock.mock.calls[0][0].results).toHaveLength(6);
- expect(mock.mock.calls[0][0].results[0].item.model.slug).toBe('foo-team');
});
+ expect(mock.mock.calls[0][0].results[0].item.model.slug).toBe('foo-team');
});
describe('API queries', function () {
diff --git a/static/app/components/search/sources/apiSource.tsx b/static/app/components/search/sources/apiSource.tsx
index 516a55dc30209e..7940280bc9c5e0 100644
--- a/static/app/components/search/sources/apiSource.tsx
+++ b/static/app/components/search/sources/apiSource.tsx
@@ -425,16 +425,16 @@ class ApiSource extends Component {
const [searchResults, directResults] = await Promise.all([
this.getSearchableResults([
- organizations,
- projects,
- teams,
- members,
- plugins,
- integrations,
- sentryApps,
- docIntegrations,
+ organizations!,
+ projects!,
+ teams!,
+ members!,
+ plugins!,
+ integrations!,
+ sentryApps!,
+ docIntegrations!,
]),
- this.getDirectResults([shortIdLookup, eventIdLookup]),
+ this.getDirectResults([shortIdLookup!, eventIdLookup!]),
]);
// TODO(XXX): Might consider adding logic to maintain consistent ordering
@@ -468,14 +468,14 @@ class ApiSource extends Component {
docIntegrations,
] = requests;
const searchResults = await Promise.all([
- createOrganizationResults(organizations),
- createProjectResults(projects, orgId),
- createTeamResults(teams, orgId),
- createMemberResults(members, orgId),
- createIntegrationResults(integrations, orgId),
- createPluginResults(plugins, orgId),
- createSentryAppResults(sentryApps, orgId),
- createDocIntegrationResults(docIntegrations, orgId),
+ createOrganizationResults(organizations!),
+ createProjectResults(projects!, orgId),
+ createTeamResults(teams!, orgId),
+ createMemberResults(members!, orgId),
+ createIntegrationResults(integrations!, orgId),
+ createPluginResults(plugins!, orgId),
+ createSentryAppResults(sentryApps!, orgId),
+ createDocIntegrationResults(docIntegrations!, orgId),
]);
return searchResults.flat();
@@ -488,8 +488,8 @@ class ApiSource extends Component {
const directResults = (
await Promise.all([
- createShortIdLookupResult(shortIdLookup),
- createEventIdLookupResult(eventIdLookup),
+ createShortIdLookupResult(shortIdLookup!),
+ createEventIdLookupResult(eventIdLookup!),
])
).filter(defined);
diff --git a/static/app/components/search/sources/commandSource.tsx b/static/app/components/search/sources/commandSource.tsx
index 682d5641e2a558..8c8f36a72e34b8 100644
--- a/static/app/components/search/sources/commandSource.tsx
+++ b/static/app/components/search/sources/commandSource.tsx
@@ -9,6 +9,7 @@ import ConfigStore from 'sentry/stores/configStore';
import type {PlainRoute} from 'sentry/types/legacyReactRouter';
import type {Fuse} from 'sentry/utils/fuzzySearch';
import {createFuzzySearch} from 'sentry/utils/fuzzySearch';
+import {removeBodyTheme} from 'sentry/utils/removeBodyTheme';
import type {ChildProps, ResultItem} from './types';
@@ -44,8 +45,10 @@ const ACTIONS: Action[] = [
title: t('Toggle dark mode'),
description: t('Toggle dark mode (superuser only atm)'),
requiresSuperuser: true,
- action: () =>
- ConfigStore.set('theme', ConfigStore.get('theme') === 'dark' ? 'light' : 'dark'),
+ action: () => {
+ removeBodyTheme();
+ ConfigStore.set('theme', ConfigStore.get('theme') === 'dark' ? 'light' : 'dark');
+ },
},
{
diff --git a/static/app/components/search/sources/helpSource.tsx b/static/app/components/search/sources/helpSource.tsx
index 95f6623df24f16..4f72b1a7c234dc 100644
--- a/static/app/components/search/sources/helpSource.tsx
+++ b/static/app/components/search/sources/helpSource.tsx
@@ -121,8 +121,8 @@ function mapSearchResults(results: SearchResult[]) {
// The first element should indicate the section.
if (sectionItems.length > 0) {
- sectionItems[0].item.sectionHeading = section.name;
- sectionItems[0].item.sectionCount = sectionItems.length;
+ sectionItems[0]!.item.sectionHeading = section.name;
+ sectionItems[0]!.item.sectionCount = sectionItems.length;
items.push(...sectionItems);
return;
diff --git a/static/app/components/search/sources/index.tsx b/static/app/components/search/sources/index.tsx
index e4d06f11a6b6e5..523c99cd7f9190 100644
--- a/static/app/components/search/sources/index.tsx
+++ b/static/app/components/search/sources/index.tsx
@@ -1,4 +1,4 @@
-import {Component} from 'react';
+import {useCallback} from 'react';
import type {Fuse} from 'sentry/utils/fuzzySearch';
@@ -23,53 +23,53 @@ type SourceResult = {
results: Result[];
};
-class SearchSources extends Component {
- // `allSources` will be an array of all result objects from each source
- renderResults(allSources: SourceResult[]) {
- const {children} = this.props;
+function SearchSources(props: Props) {
+ const {children, sources} = props;
- // loading means if any result has `isLoading` OR any result is null
- const isLoading = !!allSources.find(arg => arg.isLoading || arg.results === null);
+ // `allSources` will be an array of all result objects from each source
+ const renderResults = useCallback(
+ (allSources: SourceResult[]) => {
+ // loading means if any result has `isLoading` OR any result is null
+ const isLoading = !!allSources.find(arg => arg.isLoading || arg.results === null);
- const foundResults = isLoading
- ? []
- : allSources
- .flatMap(({results}) => results ?? [])
- .sort((a, b) => (a.score ?? 0) - (b.score ?? 0));
- const hasAnyResults = !!foundResults.length;
+ const foundResults = isLoading
+ ? []
+ : allSources
+ .flatMap(({results}) => results ?? [])
+ .sort((a, b) => (a.score ?? 0) - (b.score ?? 0));
+ const hasAnyResults = !!foundResults.length;
- return children({
- isLoading,
- results: foundResults,
- hasAnyResults,
- });
- }
+ return children({
+ isLoading,
+ results: foundResults,
+ hasAnyResults,
+ });
+ },
+ [children]
+ );
- renderSources(sources: Props['sources'], results: SourceResult[], idx: number) {
- if (idx >= sources.length) {
- return this.renderResults(results);
- }
- const Source = sources[idx];
- return (
-
- {(args: SourceResult) => {
- // Mutate the array instead of pushing because we don't know how often
- // this child function will be called and pushing will cause duplicate
- // results to be pushed for all calls down the chain.
- results[idx] = args;
- return this.renderSources(sources, results, idx + 1);
- }}
-
- );
- }
+ const renderSources = useCallback(
+ (results: SourceResult[], idx: number) => {
+ if (idx >= sources.length) {
+ return renderResults(results);
+ }
+ const Source = sources[idx]!;
+ return (
+
+ {(args: SourceResult) => {
+ // Mutate the array instead of pushing because we don't know how often
+ // this child function will be called and pushing will cause duplicate
+ // results to be pushed for all calls down the chain.
+ results[idx] = args;
+ return renderSources(results, idx + 1);
+ }}
+
+ );
+ },
+ [props, renderResults, sources]
+ );
- render() {
- return this.renderSources(
- this.props.sources,
- new Array(this.props.sources.length),
- 0
- );
- }
+ return renderSources(new Array(sources.length), 0);
}
export default SearchSources;
diff --git a/static/app/components/searchQueryBuilder/hooks/useQueryBuilderState.tsx b/static/app/components/searchQueryBuilder/hooks/useQueryBuilderState.tsx
index 3296faf061fe89..8f9157cb041935 100644
--- a/static/app/components/searchQueryBuilder/hooks/useQueryBuilderState.tsx
+++ b/static/app/components/searchQueryBuilder/hooks/useQueryBuilderState.tsx
@@ -131,7 +131,7 @@ function removeQueryTokensFromQuery(
}
return removeExcessWhitespaceFromParts(
- query.substring(0, tokens[0].location.start.offset),
+ query.substring(0, tokens[0]!.location.start.offset),
query.substring(tokens.at(-1)!.location.end.offset)
);
}
@@ -243,7 +243,7 @@ function replaceQueryTokens(
return query;
}
- const start = query.substring(0, tokens[0].location.start.offset);
+ const start = query.substring(0, tokens[0]!.location.start.offset);
const end = query.substring(tokens.at(-1)!.location.end.offset);
return start + value + end;
@@ -296,7 +296,7 @@ export function replaceTokensWithPadding(
return query;
}
- const start = query.substring(0, tokens[0].location.start.offset);
+ const start = query.substring(0, tokens[0]!.location.start.offset);
const end = query.substring(tokens.at(-1)!.location.end.offset);
return removeExcessWhitespaceFromParts(start, value, end);
@@ -367,7 +367,7 @@ function updateFilterMultipleValues(
const newValue =
uniqNonEmptyValues.length > 1
? `[${uniqNonEmptyValues.join(',')}]`
- : uniqNonEmptyValues[0];
+ : uniqNonEmptyValues[0]!;
return {...state, query: replaceQueryToken(state.query, token.value, newValue)};
}
diff --git a/static/app/components/searchQueryBuilder/hooks/useSelectOnDrag.tsx b/static/app/components/searchQueryBuilder/hooks/useSelectOnDrag.tsx
index 83c37de6c73d45..1cc93d905ef84d 100644
--- a/static/app/components/searchQueryBuilder/hooks/useSelectOnDrag.tsx
+++ b/static/app/components/searchQueryBuilder/hooks/useSelectOnDrag.tsx
@@ -87,8 +87,8 @@ function getItemIndexAtPosition(
y: number
) {
for (let i = 0; i < keys.length; i++) {
- const key = keys[i];
- const coords = coordinates[key];
+ const key = keys[i]!;
+ const coords = coordinates[key]!;
// If we are above this item, we must be in between this and the
// previous item on the row above it.
diff --git a/static/app/components/searchQueryBuilder/index.spec.tsx b/static/app/components/searchQueryBuilder/index.spec.tsx
index e83205cc8a53ca..a36e631d1b5110 100644
--- a/static/app/components/searchQueryBuilder/index.spec.tsx
+++ b/static/app/components/searchQueryBuilder/index.spec.tsx
@@ -163,18 +163,18 @@ describe('SearchQueryBuilder', function () {
// Should call onChange and onSearch after enter
await waitFor(() => {
expect(mockOnChange).toHaveBeenCalledTimes(1);
- expect(mockOnChange).toHaveBeenCalledWith('ab', expectedQueryState);
- expect(mockOnSearch).toHaveBeenCalledTimes(1);
- expect(mockOnSearch).toHaveBeenCalledWith('ab', expectedQueryState);
});
+ expect(mockOnChange).toHaveBeenCalledWith('ab', expectedQueryState);
+ expect(mockOnSearch).toHaveBeenCalledTimes(1);
+ expect(mockOnSearch).toHaveBeenCalledWith('ab', expectedQueryState);
await userEvent.click(document.body);
// Clicking outside activates onBlur
await waitFor(() => {
expect(mockOnBlur).toHaveBeenCalledTimes(1);
- expect(mockOnBlur).toHaveBeenCalledWith('ab', expectedQueryState);
});
+ expect(mockOnBlur).toHaveBeenCalledWith('ab', expectedQueryState);
});
});
@@ -194,8 +194,8 @@ describe('SearchQueryBuilder', function () {
await waitFor(() => {
expect(mockOnChange).toHaveBeenCalledWith('', expect.anything());
- expect(mockOnSearch).toHaveBeenCalledWith('', expect.anything());
});
+ expect(mockOnSearch).toHaveBeenCalledWith('', expect.anything());
expect(
screen.queryByRole('row', {name: 'browser.name:firefox'})
@@ -321,7 +321,7 @@ describe('SearchQueryBuilder', function () {
expect(groups).toHaveLength(3);
// First group (Field) should have age, assigned, browser.name
- const group1 = groups[0];
+ const group1 = groups[0]!;
expect(within(group1).getByRole('option', {name: 'age'})).toBeInTheDocument();
expect(within(group1).getByRole('option', {name: 'assigned'})).toBeInTheDocument();
expect(
@@ -329,13 +329,13 @@ describe('SearchQueryBuilder', function () {
).toBeInTheDocument();
// Second group (Tag) should have custom_tag_name
- const group2 = groups[1];
+ const group2 = groups[1]!;
expect(
within(group2).getByRole('option', {name: 'custom_tag_name'})
).toBeInTheDocument();
// There should be a third group for uncategorized keys
- const group3 = groups[2];
+ const group3 = groups[2]!;
expect(
within(group3).getByRole('option', {name: 'uncategorized_tag'})
).toBeInTheDocument();
@@ -398,7 +398,7 @@ describe('SearchQueryBuilder', function () {
expect(recentFilterKeys[1]).toHaveTextContent('browser');
expect(recentFilterKeys[2]).toHaveTextContent('is');
- await userEvent.click(recentFilterKeys[0]);
+ await userEvent.click(recentFilterKeys[0]!);
expect(await screen.findByRole('row', {name: 'assigned:""'})).toBeInTheDocument();
});
@@ -460,7 +460,7 @@ describe('SearchQueryBuilder', function () {
await waitFor(() => {
expect(getLastInput()).toHaveAttribute(
'aria-activedescendant',
- recentFilterKeys[0].id
+ recentFilterKeys[0]!.id
);
});
@@ -469,7 +469,7 @@ describe('SearchQueryBuilder', function () {
await waitFor(() => {
expect(getLastInput()).toHaveAttribute(
'aria-activedescendant',
- recentFilterKeys[1].id
+ recentFilterKeys[1]!.id
);
});
@@ -487,7 +487,7 @@ describe('SearchQueryBuilder', function () {
await waitFor(() => {
expect(getLastInput()).toHaveAttribute(
'aria-activedescendant',
- recentFilterKeys[0].id
+ recentFilterKeys[0]!.id
);
});
});
@@ -520,6 +520,35 @@ describe('SearchQueryBuilder', function () {
).toBeInTheDocument();
});
+ it('switches to keys menu when recent searches no longer exist', async function () {
+ const {rerender} = render(
+
+ );
+
+ await userEvent.click(getLastInput());
+
+ // Recent should be selected
+ expect(screen.getByRole('button', {name: 'Recent'})).toHaveAttribute(
+ 'aria-selected',
+ 'true'
+ );
+
+ // Rerender without recent searches
+ rerender( );
+
+ // Recent should not exist anymore
+ expect(screen.queryByRole('button', {name: 'Recent'})).not.toBeInTheDocument();
+ // All should be selected
+ expect(screen.getByRole('button', {name: 'All'})).toHaveAttribute(
+ 'aria-selected',
+ 'true'
+ );
+ });
+
it('when selecting a recent search, should reset query and call onSearch', async function () {
const mockOnSearch = jest.fn();
const mockCreateRecentSearch = MockApiClient.addMockResponse({
@@ -666,7 +695,7 @@ describe('SearchQueryBuilder', function () {
// jsdom does not support getBoundingClientRect, so we need to mock it for each item
// First freeText area is 5px wide
- freeText1.getBoundingClientRect = () => {
+ freeText1!.getBoundingClientRect = () => {
return {
top: 0,
left: 10,
@@ -677,7 +706,7 @@ describe('SearchQueryBuilder', function () {
} as DOMRect;
};
// "is:unresolved" filter is 100px wide
- filter.getBoundingClientRect = () => {
+ filter!.getBoundingClientRect = () => {
return {
top: 0,
left: 15,
@@ -688,7 +717,7 @@ describe('SearchQueryBuilder', function () {
} as DOMRect;
};
// Last freeText area is 200px wide
- freeText2.getBoundingClientRect = () => {
+ freeText2!.getBoundingClientRect = () => {
return {
top: 0,
left: 115,
@@ -856,6 +885,29 @@ describe('SearchQueryBuilder', function () {
expect(getLastInput()).toHaveFocus();
});
+
+ it('focuses the correct text input after typing boolean operators', async function () {
+ render( );
+
+ await userEvent.click(getLastInput());
+
+ // XXX(malwilley): SearchQueryBuilderInput updates state in the render
+ // function which causes an act warning despite using userEvent.click.
+ // Cannot find a way to avoid this warning.
+ jest.spyOn(console, 'error').mockImplementation(jest.fn());
+ await userEvent.keyboard('a or b{enter}');
+ jest.restoreAllMocks();
+
+ const lastInput = (await screen.findAllByTestId('query-builder-input')).at(-1);
+ expect(lastInput).toHaveFocus();
+
+ await userEvent.click(getLastInput());
+
+ // Should have three tokens: a, or, b
+ await screen.findByRole('row', {name: /a/});
+ await screen.findByRole('row', {name: /or/});
+ await screen.findByRole('row', {name: /b/});
+ });
});
describe('filter key suggestions', function () {
@@ -938,7 +990,7 @@ describe('SearchQueryBuilder', function () {
// Put focus into the first input (before the token)
await userEvent.click(
- screen.getAllByRole('combobox', {name: 'Add a search term'})[0]
+ screen.getAllByRole('combobox', {name: 'Add a search term'})[0]!
);
// Pressing delete once should focus the previous token
@@ -1409,17 +1461,17 @@ describe('SearchQueryBuilder', function () {
expect(within(screen.getByRole('listbox')).getByText('All')).toBeInTheDocument();
// First group is the selected "me"
- expect(within(groups[0]).getByRole('option', {name: 'me'})).toBeInTheDocument();
+ expect(within(groups[0]!).getByRole('option', {name: 'me'})).toBeInTheDocument();
// Second group is the remaining option in the "Suggested" section
expect(
- within(groups[1]).getByRole('option', {name: 'unassigned'})
+ within(groups[1]!).getByRole('option', {name: 'unassigned'})
).toBeInTheDocument();
// Third group are the options under the "All" section
expect(
- within(groups[2]).getByRole('option', {name: 'person1@sentry.io'})
+ within(groups[2]!).getByRole('option', {name: 'person1@sentry.io'})
).toBeInTheDocument();
expect(
- within(groups[2]).getByRole('option', {name: 'person2@sentry.io'})
+ within(groups[2]!).getByRole('option', {name: 'person2@sentry.io'})
).toBeInTheDocument();
});
diff --git a/static/app/components/searchQueryBuilder/index.stories.tsx b/static/app/components/searchQueryBuilder/index.stories.tsx
index 3a0b68dcc6454f..2a9b69235ea6f5 100644
--- a/static/app/components/searchQueryBuilder/index.stories.tsx
+++ b/static/app/components/searchQueryBuilder/index.stories.tsx
@@ -1,5 +1,6 @@
import {Fragment, useState} from 'react';
+import {ItemType} from 'sentry/components/deprecatedSmartSearchBar/types';
import MultipleCheckbox from 'sentry/components/forms/controls/multipleCheckbox';
import {SearchQueryBuilder} from 'sentry/components/searchQueryBuilder';
import {FormattedQuery} from 'sentry/components/searchQueryBuilder/formattedQuery';
@@ -8,7 +9,6 @@ import type {
FilterKeySection,
} from 'sentry/components/searchQueryBuilder/types';
import {InvalidReason} from 'sentry/components/searchSyntax/parser';
-import {ItemType} from 'sentry/components/smartSearchBar/types';
import JSXNode from 'sentry/components/stories/jsxNode';
import JSXProperty from 'sentry/components/stories/jsxProperty';
import storyBook from 'sentry/stories/storyBook';
diff --git a/static/app/components/searchQueryBuilder/index.tsx b/static/app/components/searchQueryBuilder/index.tsx
index ff0a27b3adf4cf..27566122befad9 100644
--- a/static/app/components/searchQueryBuilder/index.tsx
+++ b/static/app/components/searchQueryBuilder/index.tsx
@@ -303,6 +303,7 @@ export function SearchQueryBuilder({
}
ref={wrapperRef}
aria-disabled={disabled}
+ data-test-id="search-query-builder"
>
{
diff --git a/static/app/components/searchQueryBuilder/tokens/filter/filter.tsx b/static/app/components/searchQueryBuilder/tokens/filter/filter.tsx
index af001328692cca..15665bb382cbdd 100644
--- a/static/app/components/searchQueryBuilder/tokens/filter/filter.tsx
+++ b/static/app/components/searchQueryBuilder/tokens/filter/filter.tsx
@@ -51,10 +51,10 @@ export function FilterValueText({token}: {token: TokenResult}) {
case Token.VALUE_NUMBER_LIST:
const items = token.value.items;
- if (items.length === 1 && items[0].value) {
+ if (items.length === 1 && items[0]!.value) {
return (
- {formatFilterValue(items[0].value)}
+ {formatFilterValue(items[0]!.value)}
);
}
diff --git a/static/app/components/searchQueryBuilder/tokens/filter/parametersCombobox.tsx b/static/app/components/searchQueryBuilder/tokens/filter/parametersCombobox.tsx
index 642ded7066b69c..bcbdd2935b45a1 100644
--- a/static/app/components/searchQueryBuilder/tokens/filter/parametersCombobox.tsx
+++ b/static/app/components/searchQueryBuilder/tokens/filter/parametersCombobox.tsx
@@ -44,9 +44,9 @@ function getParameterAtCursorPosition(
let characterCount = 0;
for (let i = 0; i < items.length; i++) {
- characterCount += items[i].length + 1;
+ characterCount += items[i]!.length + 1;
if (characterCount > cursorPosition) {
- return {parameterIndex: i, textValue: items[i].trim()};
+ return {parameterIndex: i, textValue: items[i]!.trim()};
}
}
@@ -58,7 +58,7 @@ function getCursorPositionAtEndOfParameter(text: string, parameterIndex: number)
const charactersBefore =
items.slice(0, parameterIndex).join('').length + parameterIndex;
- return charactersBefore + items[parameterIndex].length;
+ return charactersBefore + items[parameterIndex]!.length;
}
function useSelectionIndex({
diff --git a/static/app/components/searchQueryBuilder/tokens/filter/parsers/string/parser.spec.tsx b/static/app/components/searchQueryBuilder/tokens/filter/parsers/string/parser.spec.tsx
index aa1333237fb8d8..43ff654f9b8a05 100644
--- a/static/app/components/searchQueryBuilder/tokens/filter/parsers/string/parser.spec.tsx
+++ b/static/app/components/searchQueryBuilder/tokens/filter/parsers/string/parser.spec.tsx
@@ -7,7 +7,7 @@ describe('parseMultiSelectValue', function () {
expect(result).not.toBeNull();
expect(result!.items).toHaveLength(1);
- expect(result?.items[0].value?.value).toEqual('a');
+ expect(result!.items[0]!.value?.value).toBe('a');
});
it('multiple value', function () {
@@ -16,9 +16,9 @@ describe('parseMultiSelectValue', function () {
expect(result).not.toBeNull();
expect(result!.items).toHaveLength(3);
- expect(result?.items[0].value?.value).toEqual('a');
- expect(result?.items[1].value?.value).toEqual('b');
- expect(result?.items[2].value?.value).toEqual('c');
+ expect(result!?.items[0]!.value?.value).toBe('a');
+ expect(result!?.items[1]!.value?.value).toBe('b');
+ expect(result!?.items[2]!.value?.value).toBe('c');
});
it('quoted value', function () {
@@ -27,13 +27,13 @@ describe('parseMultiSelectValue', function () {
expect(result).not.toBeNull();
expect(result!.items).toHaveLength(3);
- expect(result?.items[0].value?.value).toEqual('a');
+ expect(result!?.items[0]!.value?.value).toBe('a');
- expect(result?.items[1].value?.value).toEqual('b');
- expect(result?.items[1].value?.text).toEqual('"b"');
- expect(result?.items[1].value?.quoted).toBe(true);
+ expect(result!?.items[1]!.value?.value).toBe('b');
+ expect(result!?.items[1]!.value?.text).toBe('"b"');
+ expect(result!?.items[1]!.value?.quoted).toBe(true);
- expect(result?.items[2].value?.value).toEqual('c');
+ expect(result!.items[2]!.value?.value).toBe('c');
});
it('just quotes', function () {
@@ -44,9 +44,9 @@ describe('parseMultiSelectValue', function () {
expect(result!.items).toHaveLength(1);
const item = result!.items[0];
- expect(item.value?.value).toEqual('');
- expect(item.value?.text).toEqual('""');
- expect(item.value?.quoted).toBe(true);
+ expect(item!.value!?.value).toBe('');
+ expect(item!.value!?.text).toBe('""');
+ expect(item!.value!?.quoted).toBe(true);
});
it('single empty value', function () {
@@ -57,7 +57,7 @@ describe('parseMultiSelectValue', function () {
expect(result!.items).toHaveLength(1);
const item = result!.items[0];
- expect(item.value!.value).toBe('');
+ expect(item!.value!.value).toBe('');
});
it('multiple empty value', function () {
@@ -67,9 +67,9 @@ describe('parseMultiSelectValue', function () {
expect(result!.items).toHaveLength(3);
- expect(result?.items[0].value?.value).toEqual('a');
- expect(result?.items[1].value?.value).toBe('');
- expect(result?.items[2].value?.value).toEqual('b');
+ expect(result!?.items[0]!.value!?.value).toBe('a');
+ expect(result!?.items[1]!.value!?.value).toBe('');
+ expect(result!?.items[2]!.value!?.value).toBe('b');
});
it('trailing comma', function () {
@@ -79,8 +79,8 @@ describe('parseMultiSelectValue', function () {
expect(result!.items).toHaveLength(2);
- expect(result?.items[0].value?.value).toEqual('a');
- expect(result?.items[1].value?.value).toBe('');
+ expect(result!?.items[0]!.value!?.value).toBe('a');
+ expect(result!?.items[1]!.value!?.value).toBe('');
});
it('spaces', function () {
@@ -90,8 +90,8 @@ describe('parseMultiSelectValue', function () {
expect(result!.items).toHaveLength(3);
- expect(result?.items[0].value?.value).toEqual('a');
- expect(result?.items[1].value?.value).toEqual('b c');
- expect(result?.items[2].value?.value).toEqual('d');
+ expect(result!?.items[0]!.value!?.value).toBe('a');
+ expect(result!?.items[1]!.value!?.value).toBe('b c');
+ expect(result!?.items[2]!.value!?.value).toBe('d');
});
});
diff --git a/static/app/components/searchQueryBuilder/tokens/filter/valueCombobox.tsx b/static/app/components/searchQueryBuilder/tokens/filter/valueCombobox.tsx
index a225d06c60e761..6d0f941f59fccf 100644
--- a/static/app/components/searchQueryBuilder/tokens/filter/valueCombobox.tsx
+++ b/static/app/components/searchQueryBuilder/tokens/filter/valueCombobox.tsx
@@ -7,6 +7,11 @@ import type {KeyboardEvent} from '@react-types/shared';
import Checkbox from 'sentry/components/checkbox';
import type {SelectOptionWithKey} from 'sentry/components/compactSelect/types';
import {getItemsWithKeys} from 'sentry/components/compactSelect/utils';
+import {
+ ItemType,
+ type SearchGroup,
+ type SearchItem,
+} from 'sentry/components/deprecatedSmartSearchBar/types';
import {useSearchQueryBuilder} from 'sentry/components/searchQueryBuilder/context';
import {
type CustomComboboxMenu,
@@ -44,11 +49,6 @@ import {
type TokenResult,
} from 'sentry/components/searchSyntax/parser';
import {getKeyName} from 'sentry/components/searchSyntax/utils';
-import {
- ItemType,
- type SearchGroup,
- type SearchItem,
-} from 'sentry/components/smartSearchBar/types';
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import type {Tag, TagCollection} from 'sentry/types/group';
@@ -447,7 +447,7 @@ function ItemCheckbox({
onChange={() => {
dispatch({
type: 'TOGGLE_FILTER_VALUE',
- token: token,
+ token,
value: escapeTagValue(value),
});
}}
@@ -596,7 +596,7 @@ export function SearchQueryBuilderValueCombobox({
dispatch({
type: 'UPDATE_TOKEN_VALUE',
- token: token,
+ token,
value: newValue,
});
@@ -609,7 +609,7 @@ export function SearchQueryBuilderValueCombobox({
dispatch({
type: 'UPDATE_TOKEN_VALUE',
- token: token,
+ token,
value: prepareInputValueForSaving(
getFilterValueType(token, fieldDefinition),
replaceCommaSeparatedValue(inputValue, selectionIndex, escapeTagValue(value))
@@ -622,7 +622,7 @@ export function SearchQueryBuilderValueCombobox({
} else {
dispatch({
type: 'UPDATE_TOKEN_VALUE',
- token: token,
+ token,
value: cleanedValue,
});
onCommit();
@@ -681,7 +681,7 @@ export function SearchQueryBuilderValueCombobox({
if (!value && !token.value.text) {
dispatch({
type: 'UPDATE_TOKEN_VALUE',
- token: token,
+ token,
value: getDefaultFilterValue({fieldDefinition}),
});
onCommit();
@@ -797,7 +797,7 @@ export function SearchQueryBuilderValueCombobox({
handleSave={newDateTimeValue => {
dispatch({
type: 'UPDATE_TOKEN_VALUE',
- token: token,
+ token,
value: newDateTimeValue,
});
onCommit();
diff --git a/static/app/components/searchQueryBuilder/tokens/filter/valueListBox.tsx b/static/app/components/searchQueryBuilder/tokens/filter/valueListBox.tsx
index 6da3c0416ffff8..7cd8c2c9b53ed4 100644
--- a/static/app/components/searchQueryBuilder/tokens/filter/valueListBox.tsx
+++ b/static/app/components/searchQueryBuilder/tokens/filter/valueListBox.tsx
@@ -82,7 +82,7 @@ export function ValueListBox>({
overlayIsOpen={isOpen}
showSectionHeaders={!filterValue}
size="sm"
- style={{maxWidth: overlayProps.style.maxWidth}}
+ style={{maxWidth: overlayProps.style!.maxWidth}}
/>
diff --git a/static/app/components/searchQueryBuilder/tokens/filter/valueSuggestions/date.tsx b/static/app/components/searchQueryBuilder/tokens/filter/valueSuggestions/date.tsx
index ecfda0ae3bac4e..53be18461d8033 100644
--- a/static/app/components/searchQueryBuilder/tokens/filter/valueSuggestions/date.tsx
+++ b/static/app/components/searchQueryBuilder/tokens/filter/valueSuggestions/date.tsx
@@ -95,7 +95,7 @@ export function getRelativeDateSuggestions(
}
const [, value] = match;
- const intValue = parseInt(value, 10);
+ const intValue = parseInt(value!, 10);
if (isNaN(intValue)) {
return makeDefaultDateSuggestions(token);
diff --git a/static/app/components/searchQueryBuilder/tokens/filterKeyListBox/index.tsx b/static/app/components/searchQueryBuilder/tokens/filterKeyListBox/index.tsx
index 866218f5a064f7..084ac3d58f73f4 100644
--- a/static/app/components/searchQueryBuilder/tokens/filterKeyListBox/index.tsx
+++ b/static/app/components/searchQueryBuilder/tokens/filterKeyListBox/index.tsx
@@ -151,7 +151,7 @@ function useHighlightFirstOptionOnSectionChange({
if (selectedSection === RECENT_SEARCH_CATEGORY_VALUE) {
return [...state.collection].filter(item => !hiddenOptions.has(item.key));
}
- const options = state.collection.getChildren?.(selectedSection ?? sections[0].value);
+ const options = state.collection.getChildren?.(selectedSection ?? sections[0]!.value);
return [...(options ?? [])].filter(option => !hiddenOptions.has(option.key));
}, [state.collection, selectedSection, sections, hiddenOptions]);
@@ -178,6 +178,28 @@ function useHighlightFirstOptionOnSectionChange({
]);
}
+// If the selected section no longer exists, switch to the first valid section
+function useSwitchToValidSection({
+ sections,
+ selectedSection,
+ setSelectedSection,
+}: {
+ sections: Section[];
+ selectedSection: Key | null;
+ setSelectedSection: (section: string) => void;
+}) {
+ useEffect(() => {
+ if (!selectedSection || !sections.length) {
+ return;
+ }
+
+ const section = sections.find(s => s.value === selectedSection);
+ if (!section) {
+ setSelectedSection(sections[0]!.value);
+ }
+ }, [sections, selectedSection, setSelectedSection]);
+}
+
function FilterKeyMenuContent>({
recentFilters,
selectedSection,
@@ -287,6 +309,8 @@ export function FilterKeyListBox>
isOpen,
});
+ useSwitchToValidSection({sections, selectedSection, setSelectedSection});
+
const fullWidth = !query;
const showDetailsPane = fullWidth && selectedSection !== RECENT_SEARCH_CATEGORY_VALUE;
diff --git a/static/app/components/searchQueryBuilder/tokens/filterKeyListBox/useFilterKeyListBox.tsx b/static/app/components/searchQueryBuilder/tokens/filterKeyListBox/useFilterKeyListBox.tsx
index 51cfbc01d7a3e4..970170e9146026 100644
--- a/static/app/components/searchQueryBuilder/tokens/filterKeyListBox/useFilterKeyListBox.tsx
+++ b/static/app/components/searchQueryBuilder/tokens/filterKeyListBox/useFilterKeyListBox.tsx
@@ -150,7 +150,7 @@ function useFilterKeySections({
const previousNumSections = usePrevious(numSections);
useEffect(() => {
if (previousNumSections !== numSections) {
- setSelectedSection(sections[0].value);
+ setSelectedSection(sections[0]!.value);
}
}, [numSections, previousNumSections, sections]);
@@ -162,7 +162,7 @@ export function useFilterKeyListBox({filterValue}: {filterValue: string}) {
const recentFilters = useRecentSearchFilters();
const {data: recentSearches} = useRecentSearches();
const {sections, selectedSection, setSelectedSection} = useFilterKeySections({
- recentSearches: recentSearches,
+ recentSearches,
});
const filterKeyMenuItems = useMemo(() => {
@@ -263,7 +263,9 @@ export function useFilterKeyListBox({filterValue}: {filterValue: string}) {
// If we are at a non-recent filter key and going up, skip to the first recent filter key
e.preventDefault();
e.stopPropagation();
- state.selectionManager.setFocusedKey(createRecentFilterOptionKey(recentFilters[0]));
+ state.selectionManager.setFocusedKey(
+ createRecentFilterOptionKey(recentFilters[0]!)
+ );
return;
},
@@ -304,7 +306,7 @@ export function useFilterKeyListBox({filterValue}: {filterValue: string}) {
0,
sectionKeyOrder.length - 1
);
- const newSectionKey = sectionKeyOrder[newIndex];
+ const newSectionKey = sectionKeyOrder[newIndex]!;
setSelectedSection(newSectionKey);
},
[sections, selectedSection, setSelectedSection]
diff --git a/static/app/components/searchQueryBuilder/tokens/filterKeyListBox/utils.tsx b/static/app/components/searchQueryBuilder/tokens/filterKeyListBox/utils.tsx
index 6e65f2d675cad2..6545cb84fe462a 100644
--- a/static/app/components/searchQueryBuilder/tokens/filterKeyListBox/utils.tsx
+++ b/static/app/components/searchQueryBuilder/tokens/filterKeyListBox/utils.tsx
@@ -16,6 +16,7 @@ import type {
} from 'sentry/components/searchQueryBuilder/types';
import {t} from 'sentry/locale';
import type {RecentSearch, Tag, TagCollection} from 'sentry/types/group';
+import {defined} from 'sentry/utils';
import {type FieldDefinition, FieldKind} from 'sentry/utils/fields';
import {escapeFilterValue} from 'sentry/utils/tokenizeSearch';
@@ -70,9 +71,14 @@ export function createSection(
key: section.value,
value: section.value,
label: section.label,
- options: section.children.map(key =>
- createItem(keys[key], getFieldDefinition(key), section)
- ),
+ options: section.children
+ .map(key => {
+ if (!keys[key]) {
+ return null;
+ }
+ return createItem(keys[key], getFieldDefinition(key), section);
+ })
+ .filter(defined),
type: 'section',
};
}
diff --git a/static/app/components/searchQueryBuilder/tokens/freeText.tsx b/static/app/components/searchQueryBuilder/tokens/freeText.tsx
index 62c7cda67c292a..91731ee7abc698 100644
--- a/static/app/components/searchQueryBuilder/tokens/freeText.tsx
+++ b/static/app/components/searchQueryBuilder/tokens/freeText.tsx
@@ -21,7 +21,11 @@ import type {
FieldDefinitionGetter,
FocusOverride,
} from 'sentry/components/searchQueryBuilder/types';
-import {recentSearchTypeToLabel} from 'sentry/components/searchQueryBuilder/utils';
+import {
+ collapseTextTokens,
+ parseTokenKey,
+ recentSearchTypeToLabel,
+} from 'sentry/components/searchQueryBuilder/utils';
import {
InvalidReason,
type ParseResultToken,
@@ -131,12 +135,37 @@ function calculateNextFocusForFilter(state: ListState): FocusO
}
function calculateNextFocusForInsertedToken(item: Node): FocusOverride {
- const [, tokenTypeIndexStr] = item.key.toString().split(':');
+ const {index} = parseTokenKey(item.key.toString());
+
+ return {
+ itemKey: `${Token.FREE_TEXT}:${index + 1}`,
+ };
+}
- const tokenTypeIndex = parseInt(tokenTypeIndexStr, 10);
+function calculateNextFocusForCommittedCustomValue({
+ value,
+ currentFocusedKey,
+}: {
+ currentFocusedKey: string;
+ value: string;
+}): FocusOverride | undefined {
+ const {tokenType, index} = parseTokenKey(currentFocusedKey.toString());
+
+ const parsedText = collapseTextTokens(parseSearch(value));
+ const numFreeTextTokens = Math.max(
+ parsedText?.filter(token => token.type === Token.FREE_TEXT).length ?? 0
+ );
+
+ // We always expect there to be at least one free text token, so we subtract one
+ // to get the index of the next token to focus.
+ const diff = Math.max(0, numFreeTextTokens - 1);
+
+ if (diff <= 0) {
+ return undefined;
+ }
return {
- itemKey: `${Token.FREE_TEXT}:${tokenTypeIndex + 1}`,
+ itemKey: `${tokenType}:${index + diff}`,
};
}
@@ -409,11 +438,27 @@ function SearchQueryBuilderInputInternal({
});
}}
onCustomValueBlurred={value => {
- dispatch({type: 'UPDATE_FREE_TEXT', tokens: [token], text: value});
+ dispatch({
+ type: 'UPDATE_FREE_TEXT',
+ tokens: [token],
+ text: value,
+ focusOverride: calculateNextFocusForCommittedCustomValue({
+ currentFocusedKey: item.key.toString(),
+ value,
+ }),
+ });
resetInputValue();
}}
onCustomValueCommitted={value => {
- dispatch({type: 'UPDATE_FREE_TEXT', tokens: [token], text: value});
+ dispatch({
+ type: 'UPDATE_FREE_TEXT',
+ tokens: [token],
+ text: value,
+ focusOverride: calculateNextFocusForCommittedCustomValue({
+ currentFocusedKey: item.key.toString(),
+ value,
+ }),
+ });
resetInputValue();
// Because the query does not change until a subsequent render,
diff --git a/static/app/components/searchQueryBuilder/tokens/useSortedFilterKeyItems.tsx b/static/app/components/searchQueryBuilder/tokens/useSortedFilterKeyItems.tsx
index 6854775a7545a4..bedf15f74e275a 100644
--- a/static/app/components/searchQueryBuilder/tokens/useSortedFilterKeyItems.tsx
+++ b/static/app/components/searchQueryBuilder/tokens/useSortedFilterKeyItems.tsx
@@ -180,7 +180,7 @@ export function useSortedFilterKeyItems({
.map(({item}) => item)
.filter(item => item.type === 'key' && filterKeys[item.item.key])
.map(({item}) => {
- return createItem(filterKeys[item.key], getFieldDefinition(item.key));
+ return createItem(filterKeys[item.key]!, getFieldDefinition(item.key));
});
if (includeSuggestions) {
diff --git a/static/app/components/searchQueryBuilder/utils.tsx b/static/app/components/searchQueryBuilder/utils.tsx
index 740705c5cc9021..2591f15000ef7f 100644
--- a/static/app/components/searchQueryBuilder/utils.tsx
+++ b/static/app/components/searchQueryBuilder/utils.tsx
@@ -111,6 +111,12 @@ export function makeTokenKey(token: ParseResultToken, allTokens: ParseResult | n
return `${token.type}:${tokenTypeIndex}`;
}
+export function parseTokenKey(key: string) {
+ const [tokenType, indexStr] = key.split(':');
+ const index = parseInt(indexStr!, 10);
+ return {tokenType, index};
+}
+
const isSimpleTextToken = (
token: ParseResultToken
): token is TokenResult | TokenResult => {
@@ -121,7 +127,7 @@ const isSimpleTextToken = (
* Collapse adjacent FREE_TEXT and SPACES tokens into a single token.
* This is useful for rendering the minimum number of inputs in the UI.
*/
-function collapseTextTokens(tokens: ParseResult | null) {
+export function collapseTextTokens(tokens: ParseResult | null) {
if (!tokens) {
return null;
}
@@ -138,7 +144,7 @@ function collapseTextTokens(tokens: ParseResult | null) {
return [token];
}
- const lastToken = acc[acc.length - 1];
+ const lastToken = acc[acc.length - 1]!;
if (isSimpleTextToken(token) && isSimpleTextToken(lastToken)) {
const freeTextToken = lastToken as TokenResult;
diff --git a/static/app/components/searchSyntax/evaluator.tsx b/static/app/components/searchSyntax/evaluator.tsx
index 5fea88fa7d4403..ca47bb067307f5 100644
--- a/static/app/components/searchSyntax/evaluator.tsx
+++ b/static/app/components/searchSyntax/evaluator.tsx
@@ -51,7 +51,7 @@ export function toFlattened(tokens: TokenResult[]): ProcessedTokenResult[
}
for (let i = 0; i < tokens.length; i++) {
- flatten(tokens[i]);
+ flatten(tokens[i]!);
}
return flattened_result;
@@ -74,14 +74,14 @@ export function insertImplicitAND(
for (let i = 0; i < tokens.length; i++) {
const next = tokens[i + 1];
- with_implicit_and.push(tokens[i]);
+ with_implicit_and.push(tokens[i]!);
// If current is not a logic boolean and next is not a logic boolean, insert an implicit AND.
if (
next &&
next.type !== Token.LOGIC_BOOLEAN &&
- tokens[i].type !== Token.LOGIC_BOOLEAN &&
- tokens[i].type !== 'L_PAREN' &&
+ tokens[i]!.type !== Token.LOGIC_BOOLEAN &&
+ tokens[i]!.type !== 'L_PAREN' &&
next.type !== 'R_PAREN'
) {
with_implicit_and.push(AND);
@@ -118,9 +118,9 @@ export function toPostFix(tokens: TokenResult[]): ProcessedTokenResult[]
// we need to pop the AND operator from the stack and push it to the output.
stack.length > 0 &&
token.value === BooleanOperator.OR &&
- stack[stack.length - 1].type === Token.LOGIC_BOOLEAN &&
- stack[stack.length - 1].type !== 'L_PAREN' &&
- isBooleanAND(stack[stack.length - 1])
+ stack[stack.length - 1]!.type === Token.LOGIC_BOOLEAN &&
+ stack[stack.length - 1]!.type !== 'L_PAREN' &&
+ isBooleanAND(stack[stack.length - 1]!)
) {
result.push(stack.pop()!);
}
@@ -132,7 +132,7 @@ export function toPostFix(tokens: TokenResult[]): ProcessedTokenResult[]
break;
case 'R_PAREN': {
while (stack.length > 0) {
- const top = stack[stack.length - 1];
+ const top = stack[stack.length - 1]!;
if (top.type === 'L_PAREN') {
stack.pop();
break;
diff --git a/static/app/components/searchSyntax/grammar.pegjs b/static/app/components/searchSyntax/grammar.pegjs
index 9a8bfb2685c575..55eedb460fe05b 100644
--- a/static/app/components/searchSyntax/grammar.pegjs
+++ b/static/app/components/searchSyntax/grammar.pegjs
@@ -234,6 +234,16 @@ explicit_tag_key
return tc.tokenKeyExplicitTag(prefix, key);
}
+explicit_string_tag_key
+ = prefix:"tags" open_bracket key:search_key spaces comma spaces 'string' closed_bracket {
+ return tc.tokenKeyExplicitStringTag(prefix, key)
+ }
+
+explicit_number_tag_key
+ = prefix:"tags" open_bracket key:search_key spaces comma spaces 'number' closed_bracket {
+ return tc.tokenKeyExplicitNumberTag(prefix, key)
+ }
+
aggregate_key
= name:key open_paren s1:spaces args:function_args? s2:spaces closed_paren {
return tc.tokenKeyAggregate(name, args, s1, s2);
@@ -259,10 +269,10 @@ quoted_aggregate_param
}
search_key
- = key / quoted_key
+ = explicit_number_tag_key / key / quoted_key
text_key
- = explicit_tag_key / search_key
+ = explicit_tag_key / explicit_string_tag_key / search_key
// Filter values
diff --git a/static/app/components/searchSyntax/parser.spec.tsx b/static/app/components/searchSyntax/parser.spec.tsx
index fb59ab8d2fb25a..4ae87649b9482c 100644
--- a/static/app/components/searchSyntax/parser.spec.tsx
+++ b/static/app/components/searchSyntax/parser.spec.tsx
@@ -127,9 +127,9 @@ describe('searchSyntax/parser', function () {
const barTag = result[7] as TokenResult;
expect(foo.warning).toBe('foo warning');
- expect(bar.warning).toBe(null);
+ expect(bar.warning).toBeNull();
expect(fooTag.warning).toBe('foo warning');
- expect(barTag.warning).toBe(null);
+ expect(barTag.warning).toBeNull();
});
it('applies disallowFreeText', () => {
@@ -149,7 +149,7 @@ describe('searchSyntax/parser', function () {
const foo = result[1] as TokenResult;
const test = result[3] as TokenResult;
- expect(foo.invalid).toBe(null);
+ expect(foo.invalid).toBeNull();
expect(test.invalid).toEqual({
type: InvalidReason.FREE_TEXT_NOT_ALLOWED,
reason: 'Custom message',
@@ -174,12 +174,12 @@ describe('searchSyntax/parser', function () {
const or = result[3] as TokenResult;
const and = result[5] as TokenResult;
- expect(foo.invalid).toBe(null);
+ expect(foo.invalid).toBeNull();
expect(or.invalid).toEqual({
type: InvalidReason.LOGICAL_OR_NOT_ALLOWED,
reason: 'Custom message',
});
- expect(and.invalid).toBe(null);
+ expect(and.invalid).toBeNull();
});
it('applies disallowLogicalOperators (AND)', () => {
@@ -200,8 +200,8 @@ describe('searchSyntax/parser', function () {
const or = result[3] as TokenResult;
const and = result[5] as TokenResult;
- expect(foo.invalid).toBe(null);
- expect(or.invalid).toBe(null);
+ expect(foo.invalid).toBeNull();
+ expect(or.invalid).toBeNull();
expect(and.invalid).toEqual({
type: InvalidReason.LOGICAL_AND_NOT_ALLOWED,
reason: 'Custom message',
@@ -224,7 +224,7 @@ describe('searchSyntax/parser', function () {
const foo = result[1] as TokenResult;
- expect(foo.negated).toEqual(true);
+ expect(foo.negated).toBe(true);
expect(foo.invalid).toEqual({
type: InvalidReason.NEGATION_NOT_ALLOWED,
reason: 'Custom message',
diff --git a/static/app/components/searchSyntax/parser.tsx b/static/app/components/searchSyntax/parser.tsx
index 92c2970364247f..dd60737bea9a9f 100644
--- a/static/app/components/searchSyntax/parser.tsx
+++ b/static/app/components/searchSyntax/parser.tsx
@@ -43,6 +43,8 @@ export enum Token {
LOGIC_BOOLEAN = 'logicBoolean',
KEY_SIMPLE = 'keySimple',
KEY_EXPLICIT_TAG = 'keyExplicitTag',
+ KEY_EXPLICIT_NUMBER_TAG = 'keyExplicitNumberTag',
+ KEY_EXPLICIT_STRING_TAG = 'keyExplicitStringTag',
KEY_AGGREGATE = 'keyAggregate',
KEY_AGGREGATE_ARGS = 'keyAggregateArgs',
KEY_AGGREGATE_PARAMS = 'keyAggregateParam',
@@ -127,7 +129,11 @@ export const interchangeableFilterOperators = {
[FilterType.DATE]: [FilterType.SPECIFIC_DATE],
};
-const textKeys = [Token.KEY_SIMPLE, Token.KEY_EXPLICIT_TAG] as const;
+const textKeys = [
+ Token.KEY_SIMPLE,
+ Token.KEY_EXPLICIT_TAG,
+ Token.KEY_EXPLICIT_STRING_TAG,
+] as const;
/**
* This constant-type configuration object declares how each filter type
@@ -486,6 +492,26 @@ export class TokenConverter {
key,
});
+ tokenKeyExplicitStringTag = (
+ prefix: string,
+ key: ReturnType
+ ) => ({
+ ...this.defaultTokenFields,
+ type: Token.KEY_EXPLICIT_STRING_TAG as const,
+ prefix,
+ key,
+ });
+
+ tokenKeyExplicitNumberTag = (
+ prefix: string,
+ key: ReturnType
+ ) => ({
+ ...this.defaultTokenFields,
+ type: Token.KEY_EXPLICIT_NUMBER_TAG as const,
+ prefix,
+ key,
+ });
+
tokenKeyAggregateParam = (value: string, quoted: boolean) => ({
...this.defaultTokenFields,
type: Token.KEY_AGGREGATE_PARAMS as const,
@@ -526,7 +552,7 @@ export class TokenConverter {
) => ({
...this.defaultTokenFields,
type: Token.VALUE_ISO_8601_DATE as const,
- value: value,
+ value,
parsed: this.config.parse ? parseDate(value) : undefined,
date: date.flat().join(''),
time: Array.isArray(time) ? time.flat().flat().join('').replace('T', '') : time,
@@ -540,7 +566,7 @@ export class TokenConverter {
) => ({
...this.defaultTokenFields,
type: Token.VALUE_RELATIVE_DATE as const,
- value: value,
+ value,
parsed: this.config.parse ? parseRelativeDate(value, {unit, sign}) : undefined,
sign,
unit,
@@ -553,7 +579,7 @@ export class TokenConverter {
...this.defaultTokenFields,
type: Token.VALUE_DURATION as const,
- value: value,
+ value,
parsed: this.config.parse ? parseDuration(value, unit) : undefined,
unit,
});
@@ -584,7 +610,7 @@ export class TokenConverter {
) => ({
...this.defaultTokenFields,
type: Token.VALUE_SIZE as const,
- value: value,
+ value,
// units are case insensitive, normalize them in their parsed representation
// so that we dont have to compare all possible permutations.
parsed: this.config.parse ? parseSize(value, unit) : undefined,
@@ -594,14 +620,14 @@ export class TokenConverter {
tokenValuePercentage = (value: string) => ({
...this.defaultTokenFields,
type: Token.VALUE_PERCENTAGE as const,
- value: value,
+ value,
parsed: this.config.parse ? parsePercentage(value) : undefined,
});
tokenValueBoolean = (value: string) => ({
...this.defaultTokenFields,
type: Token.VALUE_BOOLEAN as const,
- value: value,
+ value,
parsed: this.config.parse ? parseBoolean(value) : undefined,
});
@@ -771,13 +797,25 @@ export class TokenConverter {
*/
checkFilterWarning = (key: FilterMap[T]['key']) => {
if (
- ![Token.KEY_SIMPLE, Token.KEY_EXPLICIT_TAG, Token.KEY_AGGREGATE].includes(key.type)
+ ![
+ Token.KEY_SIMPLE,
+ Token.KEY_EXPLICIT_TAG,
+ Token.KEY_AGGREGATE,
+ Token.KEY_EXPLICIT_NUMBER_TAG,
+ Token.KEY_EXPLICIT_STRING_TAG,
+ ].includes(key.type)
) {
return null;
}
const keyName = getKeyName(
- key as TokenResult
+ key as TokenResult<
+ | Token.KEY_SIMPLE
+ | Token.KEY_EXPLICIT_TAG
+ | Token.KEY_AGGREGATE
+ | Token.KEY_EXPLICIT_NUMBER_TAG
+ | Token.KEY_EXPLICIT_STRING_TAG
+ >
);
return this.config.getFilterTokenWarning?.(keyName) ?? null;
};
@@ -838,7 +876,10 @@ export class TokenConverter {
*/
checkInvalidTextFilter = (key: TextFilter['key'], value: TextFilter['value']) => {
// Explicit tag keys will always be treated as text filters
- if (key.type === Token.KEY_EXPLICIT_TAG) {
+ if (
+ key.type === Token.KEY_EXPLICIT_TAG ||
+ key.type === Token.KEY_EXPLICIT_STRING_TAG
+ ) {
return this.checkInvalidTextValue(value);
}
@@ -1425,7 +1466,7 @@ export function joinQuery(
return (
(leadingSpace ? ' ' : '') +
(parsedTerms.length === 1
- ? parsedTerms[0].text
+ ? parsedTerms[0]!.text
: parsedTerms.map(p => p.text).join(additionalSpaceBetween ? ' ' : ''))
);
}
diff --git a/static/app/components/searchSyntax/renderer.tsx b/static/app/components/searchSyntax/renderer.tsx
index f8784c5025d4ce..e1854d464f7965 100644
--- a/static/app/components/searchSyntax/renderer.tsx
+++ b/static/app/components/searchSyntax/renderer.tsx
@@ -224,7 +224,13 @@ function KeyToken({
token,
negated,
}: {
- token: TokenResult;
+ token: TokenResult<
+ | Token.KEY_SIMPLE
+ | Token.KEY_AGGREGATE
+ | Token.KEY_EXPLICIT_TAG
+ | Token.KEY_EXPLICIT_NUMBER_TAG
+ | Token.KEY_EXPLICIT_STRING_TAG
+ >;
negated?: boolean;
}) {
let value: React.ReactNode = token.text;
diff --git a/static/app/components/searchSyntax/utils.tsx b/static/app/components/searchSyntax/utils.tsx
index e9d8c669d08f17..7c7a35137f6b65 100644
--- a/static/app/components/searchSyntax/utils.tsx
+++ b/static/app/components/searchSyntax/utils.tsx
@@ -104,6 +104,12 @@ export function treeResultLocator({
nodeVisitor(token.argsSpaceBefore);
nodeVisitor(token.argsSpaceAfter);
break;
+ case Token.KEY_EXPLICIT_NUMBER_TAG:
+ nodeVisitor(token.key);
+ break;
+ case Token.KEY_EXPLICIT_STRING_TAG:
+ nodeVisitor(token.key);
+ break;
case Token.LOGIC_GROUP:
token.inner.forEach(nodeVisitor);
break;
@@ -172,6 +178,16 @@ export function treeTransformer({tree, transform}: TreeTransformerOpts) {
argsSpaceBefore: nodeVisitor(token.argsSpaceBefore),
argsSpaceAfter: nodeVisitor(token.argsSpaceAfter),
});
+ case Token.KEY_EXPLICIT_NUMBER_TAG:
+ return transform({
+ ...token,
+ key: nodeVisitor(token.key),
+ });
+ case Token.KEY_EXPLICIT_STRING_TAG:
+ return transform({
+ ...token,
+ key: nodeVisitor(token.key),
+ });
case Token.LOGIC_GROUP:
return transform({
...token,
@@ -213,7 +229,13 @@ type GetKeyNameOpts = {
* Utility to get the string name of any type of key.
*/
export const getKeyName = (
- key: TokenResult,
+ key: TokenResult<
+ | Token.KEY_SIMPLE
+ | Token.KEY_EXPLICIT_TAG
+ | Token.KEY_AGGREGATE
+ | Token.KEY_EXPLICIT_NUMBER_TAG
+ | Token.KEY_EXPLICIT_STRING_TAG
+ >,
options: GetKeyNameOpts = {}
) => {
const {aggregateWithArgs, showExplicitTagPrefix = false} = options;
@@ -229,6 +251,15 @@ export const getKeyName = (
return aggregateWithArgs
? `${key.name.value}(${key.args ? key.args.text : ''})`
: key.name.value;
+ case Token.KEY_EXPLICIT_NUMBER_TAG:
+ // number tags always need to be expressed with the
+ // explicit tag prefix + type
+ return key.text;
+ case Token.KEY_EXPLICIT_STRING_TAG:
+ if (showExplicitTagPrefix) {
+ return key.text;
+ }
+ return key.key.value;
default:
return '';
}
@@ -295,6 +326,10 @@ export function stringifyToken(token: TokenResult) {
return token.text;
case Token.KEY_EXPLICIT_TAG:
return `${token.prefix}[${token.key.value}]`;
+ case Token.KEY_EXPLICIT_NUMBER_TAG:
+ return `${token.prefix}[${token.key.value},number]`;
+ case Token.KEY_EXPLICIT_STRING_TAG:
+ return `${token.prefix}[${token.key.value},string]`;
case Token.VALUE_TEXT:
return token.quoted ? `"${token.value}"` : token.value;
case Token.VALUE_RELATIVE_DATE:
diff --git a/static/app/components/sidebar/index.tsx b/static/app/components/sidebar/index.tsx
index bae6b5408a7e31..b777c55631f2b8 100644
--- a/static/app/components/sidebar/index.tsx
+++ b/static/app/components/sidebar/index.tsx
@@ -34,6 +34,7 @@ import {
IconSiren,
IconStats,
IconSupport,
+ IconTelescope,
IconTimer,
} from 'sentry/icons';
import {t} from 'sentry/locale';
@@ -252,7 +253,7 @@ function Sidebar() {
/>
);
- const discover2 = hasOrganization && (
+ const discover = hasOrganization && (
}
+ // In errors-only deploys, Discover isn't a nested link, so it needs a proper icon
+ icon={isSelfHostedErrorsOnly ? : }
label={{t('Discover')} }
to={getDiscoverLandingUrl(organization)}
id="discover-v2"
@@ -502,7 +504,7 @@ function Sidebar() {
{metrics}
{profiling}
{replays}
- {discover2}
+ {discover}
);
@@ -565,7 +567,7 @@ function Sidebar() {
{alerts}
- {discover2}
+ {discover}
{dashboards}
{releases}
{userFeedback}
@@ -640,9 +642,9 @@ function Sidebar() {
)}
-
+
{HookStore.get('sidebar:bottom-items').length > 0 &&
- HookStore.get('sidebar:bottom-items')[0]({
+ HookStore.get('sidebar:bottom-items')[0]!({
orientation,
collapsed,
hasPanel,
@@ -803,6 +805,7 @@ const SubitemDot = styled('div')<{collapsed: boolean}>`
`;
const SidebarSection = styled(SidebarSectionGroup)<{
+ centeredItems?: boolean;
hasNewNav?: boolean;
noMargin?: boolean;
noPadding?: boolean;
@@ -823,6 +826,12 @@ const SidebarSection = styled(SidebarSectionGroup)<{
}
`}
+ ${p =>
+ p.centeredItems &&
+ css`
+ align-items: center;
+ `}
+
&:empty {
display: none;
}
diff --git a/static/app/components/sidebar/newOnboardingStatus.spec.tsx b/static/app/components/sidebar/newOnboardingStatus.spec.tsx
new file mode 100644
index 00000000000000..466f0dd7bff180
--- /dev/null
+++ b/static/app/components/sidebar/newOnboardingStatus.spec.tsx
@@ -0,0 +1,122 @@
+import {OrganizationFixture} from 'sentry-fixture/organization';
+import {UserFixture} from 'sentry-fixture/user';
+
+import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary';
+
+import {NewOnboardingStatus} from 'sentry/components/sidebar/newOnboardingStatus';
+import {SidebarPanelKey} from 'sentry/components/sidebar/types';
+import {OnboardingTaskKey} from 'sentry/types/onboarding';
+import type {Organization} from 'sentry/types/organization';
+
+function renderMockRequests(organization: Organization) {
+ const getOnboardingTasksMock = MockApiClient.addMockResponse({
+ url: `/organizations/${organization.slug}/onboarding-tasks/`,
+ method: 'GET',
+ body: {
+ onboardingTasks: organization.onboardingTasks,
+ },
+ });
+
+ const postOnboardingTasksMock = MockApiClient.addMockResponse({
+ url: `/organizations/${organization.slug}/onboarding-tasks/`,
+ method: 'POST',
+ body: {task: OnboardingTaskKey.FIRST_PROJECT, completionSeen: true},
+ });
+
+ return {getOnboardingTasksMock, postOnboardingTasksMock};
+}
+
+describe('Onboarding Status', function () {
+ it('panel is collapsed and has pending tasks to be seen', async function () {
+ const organization = OrganizationFixture({
+ features: ['onboarding'],
+ onboardingTasks: [
+ {
+ task: OnboardingTaskKey.FIRST_PROJECT,
+ status: 'complete',
+ user: UserFixture(),
+ completionSeen: undefined,
+ dateCompleted: undefined,
+ },
+ ],
+ });
+
+ const {getOnboardingTasksMock, postOnboardingTasksMock} =
+ renderMockRequests(organization);
+
+ const handleShowPanel = jest.fn();
+
+ render(
+ ,
+ {
+ organization,
+ }
+ );
+
+ expect(screen.getByText('1 completed task')).toBeInTheDocument();
+ expect(screen.getByTestId('pending-seen-indicator')).toBeInTheDocument();
+
+ // By hovering over the button, we should refetch the data
+ await userEvent.hover(screen.getByRole('button', {name: 'Onboarding'}));
+ await waitFor(() => expect(getOnboardingTasksMock).toHaveBeenCalled());
+
+ // Open the panel
+ await userEvent.click(screen.getByRole('button', {name: 'Onboarding'}));
+ await waitFor(() => expect(getOnboardingTasksMock).toHaveBeenCalled());
+ await waitFor(() => expect(postOnboardingTasksMock).toHaveBeenCalled());
+ expect(handleShowPanel).toHaveBeenCalled();
+ });
+
+ it('panel is expanded and has no pending tasks to be seen', async function () {
+ const organization = OrganizationFixture({
+ features: ['onboarding'],
+ onboardingTasks: [
+ {
+ task: OnboardingTaskKey.FIRST_PROJECT,
+ status: 'complete',
+ user: UserFixture(),
+ completionSeen: '2024-12-16T14:52:01.385227Z',
+ dateCompleted: '2024-12-13T09:35:05.010028Z',
+ },
+ ],
+ });
+
+ const {getOnboardingTasksMock} = renderMockRequests(organization);
+
+ const handleHidePanel = jest.fn();
+
+ render(
+ ,
+ {
+ organization,
+ }
+ );
+
+ expect(screen.getByText('1 completed task')).toBeInTheDocument();
+
+ // Do not show the pending indicator
+ expect(screen.queryByTestId('pending-seen-indicator')).not.toBeInTheDocument();
+
+ // Shows the panel
+ expect(screen.getByText('Quick Setup')).toBeInTheDocument();
+
+ // Triggers a fetch request
+ expect(getOnboardingTasksMock).toHaveBeenCalled();
+
+ // Hide Panel
+ await userEvent.click(screen.getByLabelText('Close Panel'));
+ await waitFor(() => expect(handleHidePanel).toHaveBeenCalled());
+ });
+});
diff --git a/static/app/components/sidebar/newOnboardingStatus.tsx b/static/app/components/sidebar/newOnboardingStatus.tsx
index 2cb24e6e1a6880..6241c333812957 100644
--- a/static/app/components/sidebar/newOnboardingStatus.tsx
+++ b/static/app/components/sidebar/newOnboardingStatus.tsx
@@ -5,17 +5,22 @@ import styled from '@emotion/styled';
import {updateOnboardingTask} from 'sentry/actionCreators/onboardingTasks';
import {OnboardingContext} from 'sentry/components/onboarding/onboardingContext';
+import {DeprecatedNewOnboardingSidebar} from 'sentry/components/onboardingWizard/deprecatedNewSidebar';
import {NewOnboardingSidebar} from 'sentry/components/onboardingWizard/newSidebar';
import {getMergedTasks} from 'sentry/components/onboardingWizard/taskConfig';
import {useOnboardingTasks} from 'sentry/components/onboardingWizard/useOnboardingTasks';
-import {findCompleteTasks, taskIsDone} from 'sentry/components/onboardingWizard/utils';
+import {
+ findCompleteTasks,
+ hasQuickStartUpdatesFeatureGA,
+ taskIsDone,
+} from 'sentry/components/onboardingWizard/utils';
import ProgressRing, {
RingBackground,
RingBar,
RingText,
} from 'sentry/components/progressRing';
import {ExpandedContext} from 'sentry/components/sidebar/expandedContextProvider';
-import {t, tct} from 'sentry/locale';
+import {t, tn} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {trackAnalytics} from 'sentry/utils/analytics';
import {isDemoModeEnabled} from 'sentry/utils/demoMode';
@@ -115,7 +120,7 @@ export function NewOnboardingStatus({
}
trackAnalytics('quick_start.completed', {
- organization: organization,
+ organization,
referrer: 'onboarding_sidebar',
new_experience: true,
});
@@ -151,6 +156,7 @@ export function NewOnboardingStatus({
aria-label={label}
onClick={handleShowPanel}
isActive={isActive}
+ showText={!shouldAccordionFloat}
onMouseEnter={() => {
refetch();
}}
@@ -171,28 +177,35 @@ export function NewOnboardingStatus({
{!shouldAccordionFloat && (
{label}
-
+
{walkthrough
- ? tct('[totalCompletedTasks] completed tours', {
- totalCompletedTasks: doneTasks.length,
- })
- : tct('[totalCompletedTasks] completed tasks', {
- totalCompletedTasks: doneTasks.length,
- })}
- {pendingCompletionSeen && }
+ ? tn('%s completed tour', '%s completed tours', doneTasks.length)
+ : tn('%s completed task', '%s completed tasks', doneTasks.length)}
+ {pendingCompletionSeen && (
+
+ )}
)}
- {isActive && (
-
- )}
+ {isActive &&
+ (hasQuickStartUpdatesFeatureGA(organization) ? (
+
+ ) : (
+
+ ))}
);
}
@@ -242,11 +255,11 @@ const hoverCss = (p: {theme: Theme}) => css`
}
`;
-const Container = styled('div')<{isActive: boolean}>`
- padding: 9px 19px 9px 16px;
+const Container = styled('div')<{isActive: boolean; showText: boolean}>`
+ padding: 9px 16px;
cursor: pointer;
display: grid;
- grid-template-columns: max-content 1fr;
+ grid-template-columns: ${p => (p.showText ? 'max-content 1fr' : 'max-content')};
gap: ${space(1.5)};
align-items: center;
transition: background 100ms;
diff --git a/static/app/components/sidebar/sidebarItem.tsx b/static/app/components/sidebar/sidebarItem.tsx
index b03d510f79ba92..87e99b0f1f8abc 100644
--- a/static/app/components/sidebar/sidebarItem.tsx
+++ b/static/app/components/sidebar/sidebarItem.tsx
@@ -335,7 +335,7 @@ export function isItemActive(
!location.pathname.startsWith('/settings/')) ||
(item?.label === 'Releases' && location.pathname.includes('/release-thresholds/')) ||
(item?.label === 'Performance' &&
- location.pathname.includes('/performance/') &&
+ location.pathname.startsWith('/performance/') &&
!location.pathname.startsWith('/settings/'))
);
}
diff --git a/static/app/components/sidebar/sidebarPanel.tsx b/static/app/components/sidebar/sidebarPanel.tsx
index a4fb5e3a0ff2b6..ce29b50ce283db 100644
--- a/static/app/components/sidebar/sidebarPanel.tsx
+++ b/static/app/components/sidebar/sidebarPanel.tsx
@@ -4,6 +4,7 @@ import {css} from '@emotion/react';
import styled from '@emotion/styled';
import {IconClose} from 'sentry/icons';
+import {t} from 'sentry/locale';
import HookStore from 'sentry/stores/hookStore';
import {slideInLeft} from 'sentry/styles/animations';
import {space} from 'sentry/styles/space';
@@ -110,7 +111,7 @@ function SidebarPanel({
{title ? (
{title}
-
+
) : null}
{children}
diff --git a/static/app/components/slideOverPanel.tsx b/static/app/components/slideOverPanel.tsx
index e0e83419acd882..567a5dca02fd4b 100644
--- a/static/app/components/slideOverPanel.tsx
+++ b/static/app/components/slideOverPanel.tsx
@@ -93,10 +93,10 @@ const _SlideOverPanel = styled(motion.div, {
}>`
position: fixed;
- top: ${space(2)};
- right: 0;
+ top: ${p => (p.slidePosition === 'left' ? '54px' : space(2))};
+ right: ${p => (p.slidePosition === 'left' ? space(2) : 0)};
bottom: ${space(2)};
- left: ${space(2)};
+ left: ${p => (p.slidePosition === 'left' ? 0 : space(2))};
overflow: auto;
pointer-events: auto;
@@ -139,6 +139,7 @@ const _SlideOverPanel = styled(motion.div, {
position: relative;
width: ${PANEL_WIDTH};
+ min-width: 500px;
height: 100%;
top: 0;
diff --git a/static/app/components/slider/index.tsx b/static/app/components/slider/index.tsx
index e695816effedde..93d880251bcefb 100644
--- a/static/app/components/slider/index.tsx
+++ b/static/app/components/slider/index.tsx
@@ -125,14 +125,14 @@ function BaseSlider(
onChange: indexValue =>
onChange?.(
Array.isArray(indexValue)
- ? indexValue.map(i => allowedValues[i])
- : allowedValues[indexValue]
+ ? indexValue.map(i => allowedValues[i]!)
+ : allowedValues[indexValue]!
),
onChangeEnd: indexValue =>
onChangeEnd?.(
Array.isArray(indexValue)
- ? indexValue.map(i => allowedValues[i])
- : allowedValues[indexValue]
+ ? indexValue.map(i => allowedValues[i]!)
+ : allowedValues[indexValue]!
),
}),
};
@@ -176,7 +176,7 @@ function BaseSlider(
if (nThumbs > 1) {
return refs.current;
}
- return refs.current[0];
+ return refs.current[0]!;
}, [nThumbs]);
const getFormattedValue = useCallback(
@@ -185,8 +185,8 @@ function BaseSlider(
// like an index for `allowedValues`.
if (allowedValues) {
return formatLabel
- ? formatLabel(allowedValues[val])
- : state.getFormattedValue(allowedValues[val]);
+ ? formatLabel(allowedValues[val]!)
+ : state.getFormattedValue(allowedValues[val]!);
}
return formatLabel ? formatLabel(val) : state.getFormattedValue(val);
@@ -214,10 +214,10 @@ function BaseSlider(
{label}
{nThumbs > 1
- ? `${getFormattedValue(selectedRange[0])}–${getFormattedValue(
- selectedRange[1]
+ ? `${getFormattedValue(selectedRange[0]!)}–${getFormattedValue(
+ selectedRange[1]!
)}`
- : getFormattedValue(selectedRange[1])}
+ : getFormattedValue(selectedRange[1]!)}
)}
@@ -234,8 +234,8 @@ function BaseSlider(
disabled={disabled}
error={error}
style={{
- left: `${state.getValuePercent(selectedRange[0]) * 100}%`,
- right: `${100 - state.getValuePercent(selectedRange[1]) * 100}%`,
+ left: `${state.getValuePercent(selectedRange[0]!) * 100}%`,
+ right: `${100 - state.getValuePercent(selectedRange[1]!) * 100}%`,
}}
/>
@@ -245,7 +245,9 @@ function BaseSlider(
aria-hidden
error={error}
disabled={disabled}
- inSelection={tickValue >= selectedRange[0] && tickValue <= selectedRange[1]}
+ inSelection={
+ tickValue >= selectedRange[0]! && tickValue <= selectedRange[1]!
+ }
style={{left: `${(state.getValuePercent(tickValue) * 100).toFixed(2)}%`}}
justifyContent={
index === 0
diff --git a/static/app/components/slider/thumb.tsx b/static/app/components/slider/thumb.tsx
index 10965b10101676..1b26148b3723e2 100644
--- a/static/app/components/slider/thumb.tsx
+++ b/static/app/components/slider/thumb.tsx
@@ -51,7 +51,7 @@ function BaseSliderThumb(
transform: `translateX(${-state.getThumbPercent(index ?? 0) * 100}%)`,
}}
>
- {getFormattedValue(state.values[index ?? 0])}
+ {getFormattedValue(state.values[index ?? 0]!)}
)}
diff --git a/static/app/components/stream/group.tsx b/static/app/components/stream/group.tsx
index 7d897d25ea75a9..1268f8cb11862c 100644
--- a/static/app/components/stream/group.tsx
+++ b/static/app/components/stream/group.tsx
@@ -184,7 +184,7 @@ function BaseGroupRow({
const {period, start, end} = selection.datetime || {};
const summary =
- customStatsPeriod?.label.toLowerCase() ??
+ customStatsPeriod?.label?.toLowerCase() ??
(!!start && !!end
? 'time range'
: getRelativeSummary(period || DEFAULT_STATS_PERIOD).toLowerCase());
@@ -270,8 +270,8 @@ function BaseGroupRow({
}
return group.filtered
- ? group.filtered.stats?.[statsPeriod]
- : group.stats?.[statsPeriod];
+ ? group.filtered.stats?.[statsPeriod]!
+ : group.stats?.[statsPeriod]!;
}, [group, statsPeriod]);
const groupSecondaryStats = useMemo>(() => {
@@ -279,7 +279,7 @@ function BaseGroupRow({
return [];
}
- return group.filtered ? group.stats?.[statsPeriod] : [];
+ return group.filtered ? group.stats?.[statsPeriod]! : [];
}, [group, statsPeriod]);
if (!group) {
diff --git a/static/app/components/structuredEventData/collapsibleValue.tsx b/static/app/components/structuredEventData/collapsibleValue.tsx
index 3542369ba7fc72..8f9b0cef3b8adf 100644
--- a/static/app/components/structuredEventData/collapsibleValue.tsx
+++ b/static/app/components/structuredEventData/collapsibleValue.tsx
@@ -12,6 +12,7 @@ interface Props {
closeTag: string;
openTag: string;
path: string;
+ noBasePadding?: boolean;
prefix?: ReactNode;
}
@@ -21,6 +22,7 @@ export function CollapsibleValue({
openTag,
path,
prefix = null,
+ noBasePadding,
}: Props) {
const {collapse, expand, isExpanded: isInitiallyExpanded} = useExpandedState({path});
const [isExpanded, setIsExpanded] = useState(isInitiallyExpanded);
@@ -32,7 +34,7 @@ export function CollapsibleValue({
// Toggle buttons get placed to the left of the open tag, but if this is the
// base level there is no room for it. So we add padding in this case.
- const baseLevelPadding = isBaseLevel && shouldShowToggleButton;
+ const baseLevelPadding = isBaseLevel && shouldShowToggleButton && !noBasePadding;
return (
diff --git a/static/app/components/structuredEventData/recursiveStructuredData.tsx b/static/app/components/structuredEventData/recursiveStructuredData.tsx
index 9f93f5578f8c72..2089720299eb71 100644
--- a/static/app/components/structuredEventData/recursiveStructuredData.tsx
+++ b/static/app/components/structuredEventData/recursiveStructuredData.tsx
@@ -42,15 +42,15 @@ export function RecursiveStructuredData({
withOnlyFormattedText = false,
}: Props) {
let i = 0;
-
- const formattedObjectKey = objectKey ? (
-
-
- {config?.renderObjectKeys?.(objectKey) ?? objectKey}
-
- {': '}
-
- ) : null;
+ const formattedObjectKey =
+ objectKey !== undefined ? (
+
+
+ {config?.renderObjectKeys?.(objectKey) ?? objectKey}
+
+ {': '}
+
+ ) : null;
function Wrapper({children}: {children: React.ReactNode}) {
return (
@@ -207,16 +207,16 @@ export function RecursiveStructuredData({
const keys = Object.keys(value);
keys.sort(naturalCaseInsensitiveSort);
for (i = 0; i < keys.length; i++) {
- const key = keys[i];
+ const key = keys[i]!;
children.push(
{i < keys.length - 1 ?
{','} : null}
diff --git a/static/app/components/superuserStaffAccessForm.tsx b/static/app/components/superuserStaffAccessForm.tsx
index 8af0670facd4fd..8e852a72157f60 100644
--- a/static/app/components/superuserStaffAccessForm.tsx
+++ b/static/app/components/superuserStaffAccessForm.tsx
@@ -59,7 +59,7 @@ class SuperuserStaffAccessForm extends Component
{
}
const authenticators = await this.getAuthenticators();
- this.setState({authenticators: authenticators});
+ this.setState({authenticators});
// Set the error state if there are no authenticators and U2F is on
if (!authenticators.length && !disableU2FForSUForm) {
diff --git a/static/app/components/tabs/index.spec.tsx b/static/app/components/tabs/index.spec.tsx
index 162e96bc24914d..aa20811f34af49 100644
--- a/static/app/components/tabs/index.spec.tsx
+++ b/static/app/components/tabs/index.spec.tsx
@@ -47,11 +47,11 @@ describe('Tabs', () => {
});
// The first tab item is selected and its content visible
- expect(screen.getByRole('tab', {name: TABS[0].label})).toHaveAttribute(
+ expect(screen.getByRole('tab', {name: TABS[0]!.label})).toHaveAttribute(
'aria-selected',
'true'
);
- expect(screen.getByText(TABS[0].content)).toBeInTheDocument();
+ expect(screen.getByText(TABS[0]!.content)).toBeInTheDocument();
});
it('renders tabs list when disabled', () => {
@@ -71,11 +71,11 @@ describe('Tabs', () => {
);
// The first tab item is selected and its content visible
- expect(screen.getByRole('tab', {name: TABS[0].label})).toHaveAttribute(
+ expect(screen.getByRole('tab', {name: TABS[0]!.label})).toHaveAttribute(
'aria-selected',
'true'
);
- expect(screen.getByText(TABS[0].content)).toBeInTheDocument();
+ expect(screen.getByText(TABS[0]!.content)).toBeInTheDocument();
// All tabs are marked as disabled
TABS.forEach(tab => {
@@ -110,7 +110,7 @@ describe('Tabs', () => {
'aria-selected',
'true'
);
- expect(screen.getByText(TABS[1].content)).toBeInTheDocument();
+ expect(screen.getByText(TABS[1]!.content)).toBeInTheDocument();
});
it('changes tabs using keyboard navigation', async () => {
@@ -141,7 +141,7 @@ describe('Tabs', () => {
'aria-selected',
'true'
);
- expect(screen.getByText(TABS[1].content)).toBeInTheDocument();
+ expect(screen.getByText(TABS[1]!.content)).toBeInTheDocument();
});
it('changes tabs on key press in vertical orientation', async () => {
@@ -172,7 +172,7 @@ describe('Tabs', () => {
'aria-selected',
'true'
);
- expect(screen.getByText(TABS[1].content)).toBeInTheDocument();
+ expect(screen.getByText(TABS[1]!.content)).toBeInTheDocument();
});
it('renders disabled tabs', () => {
@@ -230,7 +230,7 @@ describe('Tabs', () => {
// Command/ctrl/shift-clicking on a tab link doesn't change the tab selection.
// The expected behavior is that clicking on a tab link will open a new browser
// tab/window. The current view shouldn't update.
- const secondTabEl = screen.getByRole('tab', {name: TABS[1].label});
+ const secondTabEl = screen.getByRole('tab', {name: TABS[1]!.label});
const secondTabLink = within(secondTabEl).getByRole('link', {hidden: true});
const user = userEvent.setup();
@@ -247,7 +247,7 @@ describe('Tabs', () => {
await user.click(secondTabLink);
await user.keyboard('[/ShiftLeft]');
- expect(screen.getByRole('tab', {name: TABS[0].label})).toHaveAttribute(
+ expect(screen.getByRole('tab', {name: TABS[0]!.label})).toHaveAttribute(
'aria-selected',
'true'
);
diff --git a/static/app/components/tabs/index.tsx b/static/app/components/tabs/index.tsx
index cbe934be1b99af..d196786b5c5277 100644
--- a/static/app/components/tabs/index.tsx
+++ b/static/app/components/tabs/index.tsx
@@ -47,7 +47,7 @@ export interface TabsProps
value?: T;
}
-interface TabContext {
+export interface TabContext {
rootProps: Omit, 'children' | 'className'>;
setTabListState: (state: TabListState) => void;
tabListState?: TabListState;
diff --git a/static/app/components/tagDistributionMeter.tsx b/static/app/components/tagDistributionMeter.tsx
index 5aa84d0715ccec..91b892f0f4aa18 100644
--- a/static/app/components/tagDistributionMeter.tsx
+++ b/static/app/components/tagDistributionMeter.tsx
@@ -55,7 +55,7 @@ function TagDistributionMeter({
);
}
- const largestSegment = segments[0];
+ const largestSegment = segments[0]!;
const pct = percent(largestSegment.count, totalValues);
const pctLabel = Math.floor(pct);
const renderLabel = () => {
@@ -140,7 +140,7 @@ function TagDistributionMeter({
{value.isOther ? (
) : (
)}
diff --git a/static/app/components/teamRoleSelect.tsx b/static/app/components/teamRoleSelect.tsx
index b7bb3732455e8c..1b58824181c4ef 100644
--- a/static/app/components/teamRoleSelect.tsx
+++ b/static/app/components/teamRoleSelect.tsx
@@ -56,7 +56,7 @@ function TeamRoleSelect({
member.teamRole || // From TeamMemberEndpoint
member.teamRoles?.find(tr => tr.teamSlug === team.slug)?.role || // From OrgMemberDetailEndpoint
null;
- const teamRole = teamRoleList.find(r => r.id === teamRoleId) || teamRoleList[0];
+ const teamRole = teamRoleList.find(r => r.id === teamRoleId) || teamRoleList[0]!;
return (
TeamFixture(data));
-const project = ProjectFixture({teams: [teams[0]]});
+const project = ProjectFixture({teams: [teams[0]!]});
const organization = OrganizationFixture({access: ['project:write']});
act(() => OrganizationStore.onUpdate(organization, {replace: true}));
@@ -96,7 +96,7 @@ describe('Team Selector', function () {
expect(screen.getByText('#team1')).toBeInTheDocument();
// team2 and team3 should have add to project buttons
- expect(screen.getAllByRole('button').length).toBe(2);
+ expect(screen.getAllByRole('button')).toHaveLength(2);
});
it('respects the team and project filter', async function () {
@@ -110,7 +110,7 @@ describe('Team Selector', function () {
expect(screen.queryByText('#team3')).not.toBeInTheDocument();
// team2 should have add to project buttons
- expect(screen.getAllByRole('button').length).toBe(1);
+ expect(screen.getAllByRole('button')).toHaveLength(1);
});
it('allows you to add teams outside of project', async function () {
@@ -122,7 +122,7 @@ describe('Team Selector', function () {
// team2 and team3 should have add to project buttons
const addToProjectButtons = screen.getAllByRole('button');
- await userEvent.click(addToProjectButtons[0]);
+ await userEvent.click(addToProjectButtons[0]!);
expect(addTeamToProject).toHaveBeenCalled();
});
diff --git a/static/app/components/textCopyInput.spec.tsx b/static/app/components/textCopyInput.spec.tsx
index 01dc466730c96c..2648deff824b70 100644
--- a/static/app/components/textCopyInput.spec.tsx
+++ b/static/app/components/textCopyInput.spec.tsx
@@ -1,10 +1,43 @@
-import {render, screen} from 'sentry-test/reactTestingLibrary';
+import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
import TextCopyInput from 'sentry/components/textCopyInput';
describe('TextCopyInput', function () {
- it('renders', function () {
+ beforeEach(() => {
+ Object.assign(navigator, {
+ clipboard: {
+ writeText: jest.fn().mockResolvedValue(''),
+ },
+ });
+ });
+
+ it('copies text to clipboard on click', async function () {
+ render(Text to Copy );
+ const button = screen.getByRole('button', {name: 'Copy'});
+ expect(button).toBeInTheDocument();
+
+ await userEvent.click(button);
+
+ expect(navigator.clipboard.writeText).toHaveBeenCalledWith('Text to Copy');
+ });
+
+ it('selects text in input on click', async function () {
render(Text to Copy );
- expect(screen.getByDisplayValue('Text to Copy')).toBeInTheDocument();
+ const input = screen.getByRole('textbox');
+ expect(input).toHaveValue('Text to Copy');
+ const selectSpy = jest.spyOn(input, 'select');
+
+ await userEvent.click(input);
+
+ expect(selectSpy).toHaveBeenCalled();
+ });
+
+ it('handles RTL text selection', async function () {
+ render(Text to Copy );
+ const input = screen.getByRole('textbox');
+ const setSelectionRangeSpy = jest.spyOn(input, 'setSelectionRange');
+
+ await userEvent.click(input);
+ expect(setSelectionRangeSpy).toHaveBeenCalledWith(1, input.value.length - 1);
});
});
diff --git a/static/app/components/textCopyInput.tsx b/static/app/components/textCopyInput.tsx
index 5b32a3fb84981b..c21915c274b4c7 100644
--- a/static/app/components/textCopyInput.tsx
+++ b/static/app/components/textCopyInput.tsx
@@ -1,5 +1,4 @@
-import {useCallback, useRef} from 'react';
-import {findDOMNode} from 'react-dom';
+import {useCallback, useId} from 'react';
import styled from '@emotion/styled';
import {CopyToClipboardButton} from 'sentry/components/copyToClipboardButton';
@@ -33,27 +32,21 @@ function TextCopyInput({
children,
...inputProps
}: Props) {
- const textRef = useRef(null);
+ const textNodeId = useId();
const handleSelectText = useCallback(() => {
- if (!textRef.current) {
+ const node = document.getElementById(textNodeId) as HTMLInputElement | null;
+ if (!node) {
return;
}
- // We use findDOMNode here because `this.textRef` is not a dom node,
- // it's a ref to AutoSelectText
- const node = findDOMNode(textRef.current); // eslint-disable-line react/no-find-dom-node
- if (!node || !(node instanceof HTMLElement)) {
- return;
- }
-
- if (rtl && node instanceof HTMLInputElement) {
+ if (rtl) {
// we don't want to select the first character - \u202A, nor the last - \u202C
node.setSelectionRange(1, node.value.length - 1);
} else {
selectText(node);
}
- }, [rtl]);
+ }, [rtl, textNodeId]);
/**
* We are using direction: rtl; to always show the ending of a long overflowing text in input.
@@ -68,9 +61,9 @@ function TextCopyInput({
return (
{
throw new Error('Invalid stats period');
}
- const value = parseInt(result[1], 10);
+ const value = parseInt(result[1]!, 10);
const unit = result[2] as RelativePeriodUnit;
return {
@@ -94,7 +94,7 @@ export function parseStatsPeriod(
): {end: string; start: string} {
const {value, unit} = parseStatsPeriodString(statsPeriod);
- const momentUnit = SUPPORTED_RELATIVE_PERIOD_UNITS[unit].momentUnit;
+ const momentUnit = SUPPORTED_RELATIVE_PERIOD_UNITS[unit]!.momentUnit;
const format = outputFormat === null ? undefined : outputFormat;
@@ -126,7 +126,7 @@ export function getRelativeSummary(
const {value, unit} = parseStatsPeriodString(relative);
- return SUPPORTED_RELATIVE_PERIOD_UNITS[unit].label(value);
+ return SUPPORTED_RELATIVE_PERIOD_UNITS[unit]!.label(value);
} catch {
return 'Invalid period';
}
@@ -189,7 +189,7 @@ function timePeriodIsWithinLimit({
return true;
}
- const daysMultiplier = supportedPeriods[unit].convertToDaysMultiplier;
+ const daysMultiplier = supportedPeriods[unit]!.convertToDaysMultiplier;
const numberOfDays = amount * daysMultiplier;
return numberOfDays <= maxDays;
@@ -246,7 +246,7 @@ export const _timeRangeAutoCompleteFilter = function
- makeItem(userSuppliedAmount, unit, supportedPeriods[unit].label, index)
+ makeItem(userSuppliedAmount, unit, supportedPeriods[unit]!.label, index)
);
}
@@ -257,7 +257,7 @@ export const _timeRangeAutoCompleteFilter = function "8 Days")
const {value, unit} = parseStatsPeriodString(arbitraryPeriod);
- return {[arbitraryPeriod]: SUPPORTED_RELATIVE_PERIOD_UNITS[unit].label(value)};
+ return {[arbitraryPeriod]: SUPPORTED_RELATIVE_PERIOD_UNITS[unit]!.label(value)};
}
/**
diff --git a/static/app/components/timeline/index.tsx b/static/app/components/timeline/index.tsx
index 44b57780b2d25d..fb7b8b33980a5d 100644
--- a/static/app/components/timeline/index.tsx
+++ b/static/app/components/timeline/index.tsx
@@ -27,7 +27,7 @@ export interface TimelineItemProps {
timestamp?: React.ReactNode;
}
-export const Item = forwardRef(function _Item(
+export const Item = forwardRef(function ItemInner(
{
title,
children,
diff --git a/static/app/components/tooltip.spec.tsx b/static/app/components/tooltip.spec.tsx
index 08544dd20aa2d8..6889317a0968cc 100644
--- a/static/app/components/tooltip.spec.tsx
+++ b/static/app/components/tooltip.spec.tsx
@@ -25,12 +25,23 @@ describe('Tooltip', function () {
jest.clearAllMocks();
});
- it('renders', function () {
+ it('renders', async function () {
render(
My Button
);
+
+ await userEvent.hover(screen.getByText('My Button'));
+ expect(screen.getByText('test')).toBeInTheDocument();
+
+ // Check that the arrow svg is rendered
+ expect(document.querySelector('svg')).toBeInTheDocument();
+
+ await userEvent.unhover(screen.getByText('My Button'));
+ await waitFor(() => {
+ expect(screen.queryByText('test')).not.toBeInTheDocument();
+ });
});
it('updates title', async function () {
diff --git a/static/app/components/tooltip.tsx b/static/app/components/tooltip.tsx
index 9c0903fb5b07d4..ff54ec98321ffa 100644
--- a/static/app/components/tooltip.tsx
+++ b/static/app/components/tooltip.tsx
@@ -1,6 +1,5 @@
import {createContext, Fragment, useContext, useEffect} from 'react';
import {createPortal} from 'react-dom';
-import isPropValid from '@emotion/is-prop-valid';
import type {SerializedStyles} from '@emotion/react';
import {useTheme} from '@emotion/react';
import styled from '@emotion/styled';
@@ -92,9 +91,9 @@ function Tooltip({
);
}
-const TooltipContent = styled(Overlay, {shouldForwardProp: isPropValid})<{
- maxWidth?: number;
-}>`
+const TooltipContent = styled(Overlay, {
+ shouldForwardProp: prop => prop !== 'maxWidth',
+})<{maxWidth?: number}>`
padding: ${space(1)} ${space(1.5)};
overflow-wrap: break-word;
max-width: ${p => p.maxWidth ?? 225}px;
diff --git a/static/app/components/truncate.tsx b/static/app/components/truncate.tsx
index f43e9dc0feeb79..de21e0f900e78a 100644
--- a/static/app/components/truncate.tsx
+++ b/static/app/components/truncate.tsx
@@ -55,7 +55,7 @@ function Truncate({
} else if (trimRegex && !leftTrim) {
const matches = slicedValue.match(trimRegex);
let lastIndex = matches
- ? slicedValue.lastIndexOf(matches[matches.length - 1]) + 1
+ ? slicedValue.lastIndexOf(matches[matches.length - 1]!) + 1
: slicedValue.length;
if (lastIndex <= minLength) {
lastIndex = slicedValue.length;
diff --git a/static/app/components/u2f/u2finterface.tsx b/static/app/components/u2f/u2finterface.tsx
index 459ef8a0b9f020..3c82705ad19052 100644
--- a/static/app/components/u2f/u2finterface.tsx
+++ b/static/app/components/u2f/u2finterface.tsx
@@ -56,14 +56,12 @@ class U2fInterface extends Component {
componentDidMount() {
const supported = !!window.PublicKeyCredential;
- // eslint-disable-next-line react/no-did-mount-set-state
this.setState({isSupported: supported});
const isSafari =
navigator.userAgent.includes('Safari') && !navigator.userAgent.includes('Chrome');
if (isSafari) {
- // eslint-disable-next-line react/no-did-mount-set-state
this.setState({
deviceFailure: 'safari: requires interaction',
isSafari,
diff --git a/static/app/components/updatedEmptyState.tsx b/static/app/components/updatedEmptyState.tsx
index 201cd70007fb71..d827226f914ae6 100644
--- a/static/app/components/updatedEmptyState.tsx
+++ b/static/app/components/updatedEmptyState.tsx
@@ -91,7 +91,7 @@ export default function UpdatedEmptyState({project}: {project?: Project}) {
};
}
- const install = loadGettingStarted.docs.onboarding.install(docParams)[0];
+ const install = loadGettingStarted.docs.onboarding.install(docParams)[0]!;
const configure = loadGettingStarted.docs.onboarding.configure(docParams);
const verify = loadGettingStarted.docs.onboarding.verify(docParams);
@@ -200,9 +200,9 @@ export default function UpdatedEmptyState({project}: {project?: Project}) {
{configuration.configurations &&
configuration.configurations.length > 0 ? (
- Array.isArray(configuration.configurations[0].code) ? (
+ Array.isArray(configuration.configurations[0]!.code) ? (
) : null
) : null}
diff --git a/static/app/components/userMisery.tsx b/static/app/components/userMisery.tsx
index eca100d6ac9255..fafdd065cdff4c 100644
--- a/static/app/components/userMisery.tsx
+++ b/static/app/components/userMisery.tsx
@@ -21,7 +21,7 @@ function UserMisery(props: Props) {
// 0 User Misery while still preserving the actual value for sorting purposes.
const adjustedMisery = userMisery > 0.05 ? userMisery : 0;
- const palette = new Array(bars).fill([CHART_PALETTE[0][0]]);
+ const palette = new Array(bars).fill([CHART_PALETTE[0]![0]]);
const score = Math.round(adjustedMisery * palette.length);
let title: React.ReactNode;
diff --git a/static/app/constants/chartPalette.tsx b/static/app/constants/chartPalette.tsx
index be738abb6bc395..4df1b1f597d4dc 100644
--- a/static/app/constants/chartPalette.tsx
+++ b/static/app/constants/chartPalette.tsx
@@ -176,4 +176,4 @@ export const CHART_PALETTE = [
'#f4aa27',
'#f2b712',
],
-] as string[][];
+];
diff --git a/static/app/constants/index.tsx b/static/app/constants/index.tsx
index 0d6200f2773da9..3939fe5bcae96a 100644
--- a/static/app/constants/index.tsx
+++ b/static/app/constants/index.tsx
@@ -121,8 +121,8 @@ export type PermissionChoice = {
type PermissionObj = {
choices: {
- admin: PermissionChoice;
'no-access': PermissionChoice;
+ admin?: PermissionChoice;
read?: PermissionChoice;
write?: PermissionChoice;
};
@@ -199,6 +199,15 @@ export const SENTRY_APP_PERMISSIONS: PermissionObj[] = [
admin: {label: 'Admin', scopes: ['member:read', 'member:write', 'member:admin']},
},
},
+ {
+ resource: 'Alerts',
+ help: 'Manage Alerts',
+ choices: {
+ 'no-access': {label: 'No Access', scopes: []},
+ read: {label: 'Read', scopes: ['alerts:read']},
+ write: {label: 'Read & Write', scopes: ['alerts:read', 'alerts:write']},
+ },
+ },
];
export const DEFAULT_TOAST_DURATION = 6000;
diff --git a/static/app/constants/ios-device-list.tsx b/static/app/constants/ios-device-list.tsx
index ca53d7e104523f..d6cfb100677872 100644
--- a/static/app/constants/ios-device-list.tsx
+++ b/static/app/constants/ios-device-list.tsx
@@ -3,7 +3,9 @@
// the purpose of the script is to extract only the iOS information that Sentry cares about
// and discard the rest of the JSON so we do not end up bloating bundle size.
+// see https://theapplewiki.com/wiki/models
const iOSDeviceMapping: Record = {
+ // iPod touch
'iPod1,1': 'iPod touch',
'iPod2,1': 'iPod touch (2nd generation)',
'iPod3,1': 'iPod touch (3rd generation)',
@@ -11,6 +13,7 @@ const iOSDeviceMapping: Record = {
'iPod5,1': 'iPod touch (5th generation)',
'iPod7,1': 'iPod touch (6th generation)',
'iPod9,1': 'iPod touch (7th generation)',
+ // iPhone
'iPhone1,1': 'iPhone',
'iPhone1,2': 'iPhone 3G',
'iPhone2,1': 'iPhone 3GS',
@@ -64,6 +67,11 @@ const iOSDeviceMapping: Record = {
'iPhone15,5': 'iPhone 15 Plus',
'iPhone16,1': 'iPhone 15 Pro',
'iPhone16,2': 'iPhone 15 Pro Max',
+ 'iPhone17,1': 'iPhone 16 Pro',
+ 'iPhone17,2': 'iPhone 16 Pro Max',
+ 'iPhone17,3': 'iPhone 16',
+ 'iPhone17,4': 'iPhone 16 Plus',
+ // iPad Pro
'iPad6,7': 'iPad Pro (12.9-inch)',
'iPad6,8': 'iPad Pro (12.9-inch)',
'iPad6,3': 'iPad Pro (9.7-inch)',
@@ -96,6 +104,11 @@ const iOSDeviceMapping: Record = {
'iPad13,11': 'iPad Pro (12.9-inch) (5th generation)',
'iPad14,5': 'iPad Pro (12.9-inch) (6th generation)',
'iPad14,6': 'iPad Pro (12.9-inch) (6th generation)',
+ 'iPad16,3': 'iPad Pro (11-inch) (5th generation)',
+ 'iPad16,4': 'iPad Pro (11-inch) (5th generation)',
+ 'iPad16,5': 'iPad Pro (12.9-inch) (7th generation)',
+ 'iPad16,6': 'iPad Pro (12.9-inch) (7th generation)',
+ // iPad mini
'iPad2,5': 'iPad mini',
'iPad2,6': 'iPad mini',
'iPad2,7': 'iPad mini',
@@ -111,6 +124,7 @@ const iOSDeviceMapping: Record = {
'iPad11,2': 'iPad mini (5th generation)',
'iPad14,1': 'iPad mini (6th generation)',
'iPad14,2': 'iPad mini (6th generation)',
+ // iPad Air
'iPad4,1': 'iPad Air',
'iPad4,2': 'iPad Air',
'iPad4,3': 'iPad Air',
@@ -122,6 +136,11 @@ const iOSDeviceMapping: Record = {
'iPad13,2': 'iPad Air (4th generation)',
'iPad13,16': 'iPad Air (5th generation)',
'iPad13,17': 'iPad Air (5th generation)',
+ 'iPad14,8': 'iPad Air (6th generation)',
+ 'iPad14,9': 'iPad Air (6th generation)',
+ 'iPad14,10': 'iPad Air (7th generation)',
+ 'iPad14,11': 'iPad Air (7th generation)',
+ // iPad
'iPad1,1': 'iPad',
'iPad2,1': 'iPad 2',
'iPad2,2': 'iPad 2',
@@ -145,9 +164,7 @@ const iOSDeviceMapping: Record = {
'iPad12,2': 'iPad (9th generation)',
'iPad13,18': 'iPad (10th generation)',
'iPad13,19': 'iPad (10th generation)',
- 'AudioAccessory1,1': 'HomePod',
- 'AudioAccessory1,2': 'HomePod',
- 'AudioAccessory5,1': 'HomePod mini',
+ // Apple Watch
'Watch1,1': 'Apple Watch (1st generation)',
'Watch1,2': 'Apple Watch (1st generation)',
'Watch2,6': 'Apple Watch Series 1',
@@ -192,7 +209,11 @@ const iOSDeviceMapping: Record = {
'Watch7,4': 'Apple Watch Series 9',
'Watch7,3': 'Apple Watch Series 9',
'Watch7,5': 'Apple Watch Ultra 2',
-
+ 'Watch7,8': 'Apple Watch Series 10',
+ 'Watch7,9': 'Apple Watch Series 10',
+ 'Watch7,10': 'Apple Watch Series 10',
+ 'Watch7,11': 'Apple Watch Series 10',
+ // Apple TV
'AppleTV1,1': 'Apple TV (1st generation)',
'AppleTV2,1': 'Apple TV (2nd generation)',
'AppleTV3,1': 'Apple TV (3rd generation)',
@@ -201,13 +222,6 @@ const iOSDeviceMapping: Record = {
'AppleTV6,2': 'Apple TV 4K',
'AppleTV11,1': 'Apple TV 4K (2nd generation)',
'AppleTV14,1': 'Apple TV 4K (3rd generation)',
- 'AirTag1,1': 'AirTag',
- 'AirPods1,1': 'AirPods (1st generation)',
- 'AirPods2,1,AirPods2,1': 'AirPods (2nd generation)',
- 'AirPods1,3,Audio2,1': 'AirPods (3rd generation)',
- 'AirPods2,2,AirPodsPro1,1,iProd8,1': 'AirPods Pro',
- 'AirPodsPro1,2': 'AirPods Pro (2nd generation)',
- 'AirPodsMax1,1,iProd8,6': 'AirPods Max',
};
export {iOSDeviceMapping};
diff --git a/static/app/data/forms/accountPreferences.tsx b/static/app/data/forms/accountPreferences.tsx
index 7a073f4557f5b1..39e7d27de54f44 100644
--- a/static/app/data/forms/accountPreferences.tsx
+++ b/static/app/data/forms/accountPreferences.tsx
@@ -2,6 +2,7 @@ import type {JsonFormObject} from 'sentry/components/forms/types';
import languages from 'sentry/data/languages';
import {timezoneOptions} from 'sentry/data/timezones';
import {t} from 'sentry/locale';
+import {removeBodyTheme} from 'sentry/utils/removeBodyTheme';
// Export route to make these forms searchable by label/help
export const route = '/settings/account/details/';
@@ -28,6 +29,9 @@ const formGroups: JsonFormObject[] = [
{value: 'system', label: t('Default to system')},
],
getData: transformOptions,
+ onChange: () => {
+ removeBodyTheme();
+ },
},
{
name: 'language',
diff --git a/static/app/data/forms/inboundFilters.tsx b/static/app/data/forms/inboundFilters.tsx
index c82ad9aa5d709a..2ee1c4d994b64d 100644
--- a/static/app/data/forms/inboundFilters.tsx
+++ b/static/app/data/forms/inboundFilters.tsx
@@ -77,7 +77,8 @@ export const customFilterFields: Field[] = [
help: (
{t('Filter events by error messages. ')}
- {newLineHelpText} {globHelpText}
+ {newLineHelpText} {globHelpText}{' '}
+ {t('Exceptions are matched on ": ", for example "TypeError: *".')}
),
getData: getOptionsData,
diff --git a/static/app/data/forms/organizationMembershipSettings.tsx b/static/app/data/forms/organizationMembershipSettings.tsx
index 7cb6e01b93e31f..ec03cf7d5aaecb 100644
--- a/static/app/data/forms/organizationMembershipSettings.tsx
+++ b/static/app/data/forms/organizationMembershipSettings.tsx
@@ -32,7 +32,6 @@ const formGroups: JsonFormObject[] = [
help: t(
'Allow organization members to invite other members via email without needing org owner or manager approval.'
),
- visible: ({features}) => features.has('members-invite-teammates'),
},
{
name: 'allowMemberProjectCreation',
diff --git a/static/app/data/forms/projectGeneralSettings.tsx b/static/app/data/forms/projectGeneralSettings.tsx
index 68d4a760653e89..236a972aeabfac 100644
--- a/static/app/data/forms/projectGeneralSettings.tsx
+++ b/static/app/data/forms/projectGeneralSettings.tsx
@@ -2,6 +2,7 @@ import {createFilter} from 'react-select';
import styled from '@emotion/styled';
import {PlatformIcon} from 'platformicons';
+import {hasEveryAccess} from 'sentry/components/acl/access';
import type {Field} from 'sentry/components/forms/types';
import platforms from 'sentry/data/platforms';
import {t, tct, tn} from 'sentry/locale';
@@ -149,7 +150,8 @@ export const fields: Record = {
name: 'scrapeJavaScript',
type: 'boolean',
// if this is off for the organization, it cannot be enabled for the project
- disabled: ({organization, name}) => !organization[name],
+ disabled: ({organization, project, name}) =>
+ !organization[name] || !hasEveryAccess(['project:write'], {organization, project}),
disabledReason: ORG_DISABLED_REASON,
// `props` are the props given to FormField
setValue: (val, props) => props.organization?.[props.name] && val,
diff --git a/static/app/gettingStartedDocs/android/android.tsx b/static/app/gettingStartedDocs/android/android.tsx
index a59faeddb9bab9..653f52e5ddb29a 100644
--- a/static/app/gettingStartedDocs/android/android.tsx
+++ b/static/app/gettingStartedDocs/android/android.tsx
@@ -11,7 +11,6 @@ import type {
OnboardingConfig,
} from 'sentry/components/onboarding/gettingStartedDoc/types';
import {MobileBetaBanner} from 'sentry/components/onboarding/gettingStartedDoc/utils';
-import {getAndroidMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
import {
getReplayMobileConfigureDescription,
getReplayVerifyStep,
@@ -445,7 +444,6 @@ const docs: Docs = {
onboarding,
feedbackOnboardingCrashApi: feedbackOnboardingCrashApiJava,
crashReportOnboarding: feedbackOnboardingCrashApiJava,
- customMetricsOnboarding: getAndroidMetricsOnboarding(),
platformOptions,
replayOnboarding,
};
diff --git a/static/app/gettingStartedDocs/bun/bun.tsx b/static/app/gettingStartedDocs/bun/bun.tsx
index 57e8a22dc10e1c..11987a8a0bdde4 100644
--- a/static/app/gettingStartedDocs/bun/bun.tsx
+++ b/static/app/gettingStartedDocs/bun/bun.tsx
@@ -13,7 +13,10 @@ import {
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
import exampleSnippets from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsExampleSnippets';
import {metricTagsExplanation} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
type Params = DocsParams;
@@ -207,6 +210,7 @@ const docs: Docs = {
replayOnboardingJsLoader,
customMetricsOnboarding,
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/deno/deno.tsx b/static/app/gettingStartedDocs/deno/deno.tsx
index 2303cf885e5a75..640f952f9d9c0f 100644
--- a/static/app/gettingStartedDocs/deno/deno.tsx
+++ b/static/app/gettingStartedDocs/deno/deno.tsx
@@ -5,7 +5,10 @@ import type {
DocsParams,
OnboardingConfig,
} from 'sentry/components/onboarding/gettingStartedDoc/types';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
type Params = DocsParams;
@@ -175,6 +178,7 @@ const docs: Docs = {
onboarding,
replayOnboardingJsLoader,
customMetricsOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/dotnet/aspnet.tsx b/static/app/gettingStartedDocs/dotnet/aspnet.tsx
index 394a6781c2be19..6f2f40ef588a7c 100644
--- a/static/app/gettingStartedDocs/dotnet/aspnet.tsx
+++ b/static/app/gettingStartedDocs/dotnet/aspnet.tsx
@@ -15,7 +15,10 @@ import {
getCrashReportSDKInstallFirstStep,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
import {getDotnetMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
import {getPackageVersion} from 'sentry/utils/gettingStartedDocs/getPackageVersion';
@@ -228,6 +231,7 @@ const docs: Docs = {
replayOnboardingJsLoader,
customMetricsOnboarding: getDotnetMetricsOnboarding({packageName: 'Sentry.AspNet'}),
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/dotnet/aspnetcore.tsx b/static/app/gettingStartedDocs/dotnet/aspnetcore.tsx
index 963e25db4e37c6..b8ce77f4697d40 100644
--- a/static/app/gettingStartedDocs/dotnet/aspnetcore.tsx
+++ b/static/app/gettingStartedDocs/dotnet/aspnetcore.tsx
@@ -15,7 +15,10 @@ import {
getCrashReportSDKInstallFirstStep,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
import {getDotnetMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
import {getPackageVersion} from 'sentry/utils/gettingStartedDocs/getPackageVersion';
@@ -313,6 +316,7 @@ const docs: Docs = {
replayOnboardingJsLoader,
customMetricsOnboarding: getDotnetMetricsOnboarding({packageName: 'Sentry.AspNetCore'}),
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/elixir/elixir.tsx b/static/app/gettingStartedDocs/elixir/elixir.tsx
index 689819e8801ca3..edea378891329e 100644
--- a/static/app/gettingStartedDocs/elixir/elixir.tsx
+++ b/static/app/gettingStartedDocs/elixir/elixir.tsx
@@ -11,7 +11,10 @@ import {
getCrashReportModalConfigDescription,
getCrashReportModalIntroduction,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
type Params = DocsParams;
@@ -179,6 +182,7 @@ const docs: Docs = {
onboarding,
replayOnboardingJsLoader,
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/go/echo.tsx b/static/app/gettingStartedDocs/go/echo.tsx
index 173b604def9756..6a975ce40400b5 100644
--- a/static/app/gettingStartedDocs/go/echo.tsx
+++ b/static/app/gettingStartedDocs/go/echo.tsx
@@ -14,7 +14,10 @@ import {
getCrashReportModalConfigDescription,
getCrashReportModalIntroduction,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
type Params = DocsParams;
@@ -232,6 +235,7 @@ const docs: Docs = {
onboarding,
replayOnboardingJsLoader,
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/go/fasthttp.tsx b/static/app/gettingStartedDocs/go/fasthttp.tsx
index 1c66d8fa9e4cb8..0d70c509a8ea99 100644
--- a/static/app/gettingStartedDocs/go/fasthttp.tsx
+++ b/static/app/gettingStartedDocs/go/fasthttp.tsx
@@ -14,7 +14,10 @@ import {
getCrashReportModalConfigDescription,
getCrashReportModalIntroduction,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
type Params = DocsParams;
@@ -237,6 +240,7 @@ const docs: Docs = {
onboarding,
replayOnboardingJsLoader,
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/go/fiber.tsx b/static/app/gettingStartedDocs/go/fiber.tsx
index e2a48887e3700e..8e2a5ba4571483 100644
--- a/static/app/gettingStartedDocs/go/fiber.tsx
+++ b/static/app/gettingStartedDocs/go/fiber.tsx
@@ -14,7 +14,10 @@ import {
getCrashReportModalConfigDescription,
getCrashReportModalIntroduction,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
type Params = DocsParams;
@@ -242,6 +245,7 @@ const docs: Docs = {
onboarding,
replayOnboardingJsLoader,
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/go/gin.tsx b/static/app/gettingStartedDocs/go/gin.tsx
index 3fc2d7edbfc015..7fde4071eb71d4 100644
--- a/static/app/gettingStartedDocs/go/gin.tsx
+++ b/static/app/gettingStartedDocs/go/gin.tsx
@@ -14,7 +14,10 @@ import {
getCrashReportModalConfigDescription,
getCrashReportModalIntroduction,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
type Params = DocsParams;
@@ -224,6 +227,7 @@ const docs: Docs = {
onboarding,
replayOnboardingJsLoader,
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/go/go.tsx b/static/app/gettingStartedDocs/go/go.tsx
index ccc25a4e4ccdaf..7c879a69e43f31 100644
--- a/static/app/gettingStartedDocs/go/go.tsx
+++ b/static/app/gettingStartedDocs/go/go.tsx
@@ -9,7 +9,10 @@ import {
getCrashReportModalConfigDescription,
getCrashReportModalIntroduction,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
type Params = DocsParams;
@@ -135,6 +138,7 @@ const docs: Docs = {
onboarding,
replayOnboardingJsLoader,
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/go/http.tsx b/static/app/gettingStartedDocs/go/http.tsx
index dba8624e4517ea..1b13d957898a89 100644
--- a/static/app/gettingStartedDocs/go/http.tsx
+++ b/static/app/gettingStartedDocs/go/http.tsx
@@ -14,7 +14,10 @@ import {
getCrashReportModalConfigDescription,
getCrashReportModalIntroduction,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
type Params = DocsParams;
@@ -232,6 +235,7 @@ const docs: Docs = {
onboarding,
replayOnboardingJsLoader,
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/go/iris.tsx b/static/app/gettingStartedDocs/go/iris.tsx
index 0e6a34e74b9fe1..255fb375812e91 100644
--- a/static/app/gettingStartedDocs/go/iris.tsx
+++ b/static/app/gettingStartedDocs/go/iris.tsx
@@ -14,7 +14,10 @@ import {
getCrashReportModalConfigDescription,
getCrashReportModalIntroduction,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
type Params = DocsParams;
@@ -221,6 +224,7 @@ const docs: Docs = {
onboarding,
replayOnboardingJsLoader,
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/go/martini.tsx b/static/app/gettingStartedDocs/go/martini.tsx
index 63b00ea361dd74..845137d78a5ce6 100644
--- a/static/app/gettingStartedDocs/go/martini.tsx
+++ b/static/app/gettingStartedDocs/go/martini.tsx
@@ -14,7 +14,10 @@ import {
getCrashReportModalConfigDescription,
getCrashReportModalIntroduction,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
type Params = DocsParams;
@@ -219,6 +222,7 @@ const docs: Docs = {
onboarding,
replayOnboardingJsLoader,
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/go/negroni.tsx b/static/app/gettingStartedDocs/go/negroni.tsx
index ac3c384214311f..163f8c9b11625f 100644
--- a/static/app/gettingStartedDocs/go/negroni.tsx
+++ b/static/app/gettingStartedDocs/go/negroni.tsx
@@ -14,7 +14,10 @@ import {
getCrashReportModalConfigDescription,
getCrashReportModalIntroduction,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
type Params = DocsParams;
@@ -283,6 +286,7 @@ const docs: Docs = {
onboarding,
replayOnboardingJsLoader,
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/java/java.tsx b/static/app/gettingStartedDocs/java/java.tsx
index f236144f27b656..060ee5b67d0cb5 100644
--- a/static/app/gettingStartedDocs/java/java.tsx
+++ b/static/app/gettingStartedDocs/java/java.tsx
@@ -13,7 +13,6 @@ import {
getCrashReportApiIntroduction,
getCrashReportInstallDescription,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
-import {getJavaMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
import {t, tct} from 'sentry/locale';
import {getPackageVersion} from 'sentry/utils/gettingStartedDocs/getPackageVersion';
@@ -343,7 +342,6 @@ const docs: Docs = {
platformOptions,
feedbackOnboardingCrashApi: feedbackOnboardingCrashApiJava,
crashReportOnboarding: feedbackOnboardingCrashApiJava,
- customMetricsOnboarding: getJavaMetricsOnboarding(),
onboarding,
};
diff --git a/static/app/gettingStartedDocs/java/log4j2.tsx b/static/app/gettingStartedDocs/java/log4j2.tsx
index 74535e7bff903b..b81d4034a124c0 100644
--- a/static/app/gettingStartedDocs/java/log4j2.tsx
+++ b/static/app/gettingStartedDocs/java/log4j2.tsx
@@ -9,7 +9,6 @@ import type {
DocsParams,
OnboardingConfig,
} from 'sentry/components/onboarding/gettingStartedDoc/types';
-import {getJavaMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
import {feedbackOnboardingCrashApiJava} from 'sentry/gettingStartedDocs/java/java';
import {t, tct} from 'sentry/locale';
import {getPackageVersion} from 'sentry/utils/gettingStartedDocs/getPackageVersion';
@@ -333,7 +332,6 @@ const docs: Docs = {
platformOptions,
feedbackOnboardingCrashApi: feedbackOnboardingCrashApiJava,
crashReportOnboarding: feedbackOnboardingCrashApiJava,
- customMetricsOnboarding: getJavaMetricsOnboarding(),
onboarding,
};
diff --git a/static/app/gettingStartedDocs/java/logback.tsx b/static/app/gettingStartedDocs/java/logback.tsx
index 909724e27d39cb..f32b0ee76a26e4 100644
--- a/static/app/gettingStartedDocs/java/logback.tsx
+++ b/static/app/gettingStartedDocs/java/logback.tsx
@@ -9,7 +9,6 @@ import type {
DocsParams,
OnboardingConfig,
} from 'sentry/components/onboarding/gettingStartedDoc/types';
-import {getJavaMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
import {feedbackOnboardingCrashApiJava} from 'sentry/gettingStartedDocs/java/java';
import {t, tct} from 'sentry/locale';
import {getPackageVersion} from 'sentry/utils/gettingStartedDocs/getPackageVersion';
@@ -342,7 +341,6 @@ const docs: Docs = {
onboarding,
feedbackOnboardingCrashApi: feedbackOnboardingCrashApiJava,
crashReportOnboarding: feedbackOnboardingCrashApiJava,
- customMetricsOnboarding: getJavaMetricsOnboarding(),
platformOptions,
};
diff --git a/static/app/gettingStartedDocs/java/spring-boot.tsx b/static/app/gettingStartedDocs/java/spring-boot.tsx
index 7acd0784ea000b..6a09f67afd806a 100644
--- a/static/app/gettingStartedDocs/java/spring-boot.tsx
+++ b/static/app/gettingStartedDocs/java/spring-boot.tsx
@@ -9,9 +9,11 @@ import type {
DocsParams,
OnboardingConfig,
} from 'sentry/components/onboarding/gettingStartedDoc/types';
-import {getJavaMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
import {feedbackOnboardingCrashApiJava} from 'sentry/gettingStartedDocs/java/java';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
import {getPackageVersion} from 'sentry/utils/gettingStartedDocs/getPackageVersion';
@@ -296,7 +298,7 @@ const docs: Docs = {
platformOptions,
replayOnboardingJsLoader,
crashReportOnboarding: feedbackOnboardingCrashApiJava,
- customMetricsOnboarding: getJavaMetricsOnboarding(),
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/java/spring.tsx b/static/app/gettingStartedDocs/java/spring.tsx
index 024aab936db9d8..babb60c7acfc14 100644
--- a/static/app/gettingStartedDocs/java/spring.tsx
+++ b/static/app/gettingStartedDocs/java/spring.tsx
@@ -9,9 +9,11 @@ import type {
DocsParams,
OnboardingConfig,
} from 'sentry/components/onboarding/gettingStartedDoc/types';
-import {getJavaMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
import {feedbackOnboardingCrashApiJava} from 'sentry/gettingStartedDocs/java/java';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
import {getPackageVersion} from 'sentry/utils/gettingStartedDocs/getPackageVersion';
@@ -369,7 +371,7 @@ const docs: Docs = {
platformOptions,
crashReportOnboarding: feedbackOnboardingCrashApiJava,
replayOnboardingJsLoader,
- customMetricsOnboarding: getJavaMetricsOnboarding(),
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/javascript/angular.spec.tsx b/static/app/gettingStartedDocs/javascript/angular.spec.tsx
index 8ae6ab47ca97b6..f611735ece3c37 100644
--- a/static/app/gettingStartedDocs/javascript/angular.spec.tsx
+++ b/static/app/gettingStartedDocs/javascript/angular.spec.tsx
@@ -37,13 +37,13 @@ describe('javascript-angular onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/tracesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/tracesSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/replaysSessionSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/replaysSessionSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/))
).toBeInTheDocument();
});
diff --git a/static/app/gettingStartedDocs/javascript/angular.tsx b/static/app/gettingStartedDocs/javascript/angular.tsx
index 69ce7090e61b91..bc81bf044ca74b 100644
--- a/static/app/gettingStartedDocs/javascript/angular.tsx
+++ b/static/app/gettingStartedDocs/javascript/angular.tsx
@@ -474,7 +474,7 @@ const docs: Docs = {
crashReportOnboarding,
platformOptions,
profilingOnboarding,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/javascript/astro.tsx b/static/app/gettingStartedDocs/javascript/astro.tsx
index cdb2b2ae12948f..ee3b67b9c8fc93 100644
--- a/static/app/gettingStartedDocs/javascript/astro.tsx
+++ b/static/app/gettingStartedDocs/javascript/astro.tsx
@@ -212,7 +212,7 @@ const onboarding: OnboardingConfig = {
const replayOnboarding: OnboardingConfig = {
install: () => [
{
- ...getInstallConfig()[0],
+ ...getInstallConfig()[0]!,
additionalInfo:
'Session Replay is enabled by default when you install the Astro SDK!',
},
@@ -379,7 +379,7 @@ const docs: Docs = {
replayOnboarding,
customMetricsOnboarding: getJSMetricsOnboarding({getInstallConfig}),
crashReportOnboarding,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/javascript/ember.spec.tsx b/static/app/gettingStartedDocs/javascript/ember.spec.tsx
index 8f431f9e8e7d8e..a3370015d3e053 100644
--- a/static/app/gettingStartedDocs/javascript/ember.spec.tsx
+++ b/static/app/gettingStartedDocs/javascript/ember.spec.tsx
@@ -32,13 +32,13 @@ describe('javascript-ember onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/tracesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/tracesSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/replaysSessionSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/replaysSessionSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/))
).toBeInTheDocument();
});
diff --git a/static/app/gettingStartedDocs/javascript/ember.tsx b/static/app/gettingStartedDocs/javascript/ember.tsx
index 2f015dcf5a0fdb..b145037dfe2ff4 100644
--- a/static/app/gettingStartedDocs/javascript/ember.tsx
+++ b/static/app/gettingStartedDocs/javascript/ember.tsx
@@ -293,7 +293,7 @@ const docs: Docs = {
customMetricsOnboarding: getJSMetricsOnboarding({getInstallConfig}),
crashReportOnboarding,
profilingOnboarding,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/javascript/gatsby.spec.tsx b/static/app/gettingStartedDocs/javascript/gatsby.spec.tsx
index bdb147ed4a0d1b..7a910b7ad7f826 100644
--- a/static/app/gettingStartedDocs/javascript/gatsby.spec.tsx
+++ b/static/app/gettingStartedDocs/javascript/gatsby.spec.tsx
@@ -34,13 +34,13 @@ describe('javascript-gatsby onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/tracesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/tracesSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/replaysSessionSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/replaysSessionSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/))
).toBeInTheDocument();
});
diff --git a/static/app/gettingStartedDocs/javascript/gatsby.tsx b/static/app/gettingStartedDocs/javascript/gatsby.tsx
index f09be5ea3a51c8..10550df8ca30f4 100644
--- a/static/app/gettingStartedDocs/javascript/gatsby.tsx
+++ b/static/app/gettingStartedDocs/javascript/gatsby.tsx
@@ -322,7 +322,7 @@ const docs: Docs = {
customMetricsOnboarding: getJSMetricsOnboarding({getInstallConfig}),
crashReportOnboarding,
profilingOnboarding,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/javascript/javascript.spec.tsx b/static/app/gettingStartedDocs/javascript/javascript.spec.tsx
index 5e2a593bdd454f..80f378217cfa83 100644
--- a/static/app/gettingStartedDocs/javascript/javascript.spec.tsx
+++ b/static/app/gettingStartedDocs/javascript/javascript.spec.tsx
@@ -41,13 +41,13 @@ describe('javascript onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/tracesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/tracesSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/replaysSessionSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/replaysSessionSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/))
).toBeInTheDocument();
});
diff --git a/static/app/gettingStartedDocs/javascript/javascript.tsx b/static/app/gettingStartedDocs/javascript/javascript.tsx
index 12894d782401e0..2391e9228fe8eb 100644
--- a/static/app/gettingStartedDocs/javascript/javascript.tsx
+++ b/static/app/gettingStartedDocs/javascript/javascript.tsx
@@ -30,7 +30,10 @@ import {
getReplayConfigureDescription,
getReplayVerifyStep,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/replayOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {trackAnalytics} from 'sentry/utils/analytics';
@@ -683,6 +686,7 @@ ${FLAG_OPTIONS[featureFlagOptions.integration].sdkInit}
const docs: Docs = {
onboarding,
feedbackOnboardingNpm: feedbackOnboarding,
+ feedbackOnboardingJsLoader,
replayOnboarding,
replayOnboardingJsLoader,
performanceOnboarding,
diff --git a/static/app/gettingStartedDocs/javascript/jsLoader/jsLoader.tsx b/static/app/gettingStartedDocs/javascript/jsLoader/jsLoader.tsx
index 02259ed121e7bf..7b42028576fa01 100644
--- a/static/app/gettingStartedDocs/javascript/jsLoader/jsLoader.tsx
+++ b/static/app/gettingStartedDocs/javascript/jsLoader/jsLoader.tsx
@@ -60,6 +60,71 @@ const getVerifySnippet = () => `
});
`;
+const feedbackOnboardingJsLoader: OnboardingConfig = {
+ install: (params: Params) => [
+ {
+ type: StepType.INSTALL,
+ configurations: [
+ {
+ description: t('Add this script tag to the top of the page:'),
+ language: 'html',
+ code: beautify.html(
+ ``,
+ {indent_size: 2, wrap_attributes: 'force-expand-multiline'}
+ ),
+ },
+ ],
+ },
+ ],
+ configure: () => [
+ {
+ type: StepType.CONFIGURE,
+ description: t(
+ 'When using the Loader Script, you can lazy load the User Feedback integration like this:'
+ ),
+ configurations: [
+ {
+ code: [
+ {
+ label: 'JavaScript',
+ value: 'javascript',
+ language: 'javascript',
+ code: `
+window.sentryOnLoad = function () {
+ Sentry.init({
+ // add other configuration here
+ });
+
+ Sentry.lazyLoadIntegration("feedbackIntegration")
+ .then((feedbackIntegration) => {
+ Sentry.addIntegration(feedbackIntegration({
+ // User Feedback configuration options
+ }));
+ })
+ .catch(() => {
+ // this can happen if e.g. a network error occurs,
+ // in this case User Feedback will not be enabled
+ });
+};
+ `,
+ },
+ ],
+ },
+ ],
+ additionalInfo: tct(
+ `For a full list of User Feedback configuration options, [link:read the docs].`,
+ {
+ link: (
+
+ ),
+ }
+ ),
+ },
+ ],
+ verify: () => [],
+ nextSteps: () => [],
+};
+
const replayOnboardingJsLoader: OnboardingConfig = {
install: (params: Params) => getInstallConfig(params),
configure: (params: Params) => [
@@ -103,4 +168,4 @@ const StyledAlert = styled(Alert)`
margin: 0;
`;
-export default replayOnboardingJsLoader;
+export {feedbackOnboardingJsLoader, replayOnboardingJsLoader};
diff --git a/static/app/gettingStartedDocs/javascript/nextjs.spec.tsx b/static/app/gettingStartedDocs/javascript/nextjs.spec.tsx
index 9f24cae07b7500..bd09ca8433f325 100644
--- a/static/app/gettingStartedDocs/javascript/nextjs.spec.tsx
+++ b/static/app/gettingStartedDocs/javascript/nextjs.spec.tsx
@@ -31,7 +31,7 @@ describe('javascript-nextjs onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/sentry-example-page/))
+ screen.getByText(textWithMarkupMatcher(/sentry-example-page/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/javascript/nextjs.tsx b/static/app/gettingStartedDocs/javascript/nextjs.tsx
index f66cf459579809..d941af83f6f7b4 100644
--- a/static/app/gettingStartedDocs/javascript/nextjs.tsx
+++ b/static/app/gettingStartedDocs/javascript/nextjs.tsx
@@ -425,7 +425,7 @@ const docs: Docs = {
}),
performanceOnboarding,
crashReportOnboarding,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/javascript/nuxt.spec.tsx b/static/app/gettingStartedDocs/javascript/nuxt.spec.tsx
index c3796f6848e834..883bb5c9354bb6 100644
--- a/static/app/gettingStartedDocs/javascript/nuxt.spec.tsx
+++ b/static/app/gettingStartedDocs/javascript/nuxt.spec.tsx
@@ -37,7 +37,7 @@ describe('javascript-nuxt onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/sentry-example-page/))
+ screen.getByText(textWithMarkupMatcher(/sentry-example-page/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/javascript/nuxt.tsx b/static/app/gettingStartedDocs/javascript/nuxt.tsx
index 09d11fe6157697..a1ce40665ad5e8 100644
--- a/static/app/gettingStartedDocs/javascript/nuxt.tsx
+++ b/static/app/gettingStartedDocs/javascript/nuxt.tsx
@@ -264,7 +264,7 @@ const docs: Docs = {
customMetricsOnboarding: getJSMetricsOnboarding({getInstallConfig}),
crashReportOnboarding,
profilingOnboarding,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/javascript/react.spec.tsx b/static/app/gettingStartedDocs/javascript/react.spec.tsx
index 89688044b8e7e4..14044a20e69c69 100644
--- a/static/app/gettingStartedDocs/javascript/react.spec.tsx
+++ b/static/app/gettingStartedDocs/javascript/react.spec.tsx
@@ -32,13 +32,13 @@ describe('javascript-react onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/tracesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/tracesSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/replaysSessionSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/replaysSessionSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/))
).toBeInTheDocument();
});
diff --git a/static/app/gettingStartedDocs/javascript/react.tsx b/static/app/gettingStartedDocs/javascript/react.tsx
index 6768b95d467ea3..ebf49acf362cd5 100644
--- a/static/app/gettingStartedDocs/javascript/react.tsx
+++ b/static/app/gettingStartedDocs/javascript/react.tsx
@@ -431,7 +431,7 @@ const docs: Docs = {
performanceOnboarding,
crashReportOnboarding,
profilingOnboarding,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/javascript/remix.tsx b/static/app/gettingStartedDocs/javascript/remix.tsx
index 10005884f88ea9..958cbce997375e 100644
--- a/static/app/gettingStartedDocs/javascript/remix.tsx
+++ b/static/app/gettingStartedDocs/javascript/remix.tsx
@@ -289,7 +289,7 @@ const docs: Docs = {
replayOnboarding,
customMetricsOnboarding: getJSMetricsOnboarding({getInstallConfig}),
crashReportOnboarding,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/javascript/solid.spec.tsx b/static/app/gettingStartedDocs/javascript/solid.spec.tsx
index ebe81275c36ee9..15b72aeb728bab 100644
--- a/static/app/gettingStartedDocs/javascript/solid.spec.tsx
+++ b/static/app/gettingStartedDocs/javascript/solid.spec.tsx
@@ -32,13 +32,13 @@ describe('javascript-solid onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/tracesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/tracesSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/replaysSessionSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/replaysSessionSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/))
).toBeInTheDocument();
});
diff --git a/static/app/gettingStartedDocs/javascript/solid.tsx b/static/app/gettingStartedDocs/javascript/solid.tsx
index 08f96fa3bde531..8b702507d61251 100644
--- a/static/app/gettingStartedDocs/javascript/solid.tsx
+++ b/static/app/gettingStartedDocs/javascript/solid.tsx
@@ -324,7 +324,7 @@ const docs: Docs = {
customMetricsOnboarding: getJSMetricsOnboarding({getInstallConfig}),
crashReportOnboarding,
profilingOnboarding,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/javascript/solidstart.tsx b/static/app/gettingStartedDocs/javascript/solidstart.tsx
index 610cd710478cf9..6513af9ba414d5 100644
--- a/static/app/gettingStartedDocs/javascript/solidstart.tsx
+++ b/static/app/gettingStartedDocs/javascript/solidstart.tsx
@@ -503,7 +503,7 @@ const docs: Docs = {
customMetricsOnboarding: getJSMetricsOnboarding({getInstallConfig}),
crashReportOnboarding,
profilingOnboarding,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/javascript/svelte.spec.tsx b/static/app/gettingStartedDocs/javascript/svelte.spec.tsx
index 5f60c6747b2fbe..238d714eb3251d 100644
--- a/static/app/gettingStartedDocs/javascript/svelte.spec.tsx
+++ b/static/app/gettingStartedDocs/javascript/svelte.spec.tsx
@@ -34,13 +34,13 @@ describe('javascript-svelte onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/tracesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/tracesSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/replaysSessionSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/replaysSessionSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/))
).toBeInTheDocument();
});
diff --git a/static/app/gettingStartedDocs/javascript/svelte.tsx b/static/app/gettingStartedDocs/javascript/svelte.tsx
index 37f72b7c8be392..5caf892736cf22 100644
--- a/static/app/gettingStartedDocs/javascript/svelte.tsx
+++ b/static/app/gettingStartedDocs/javascript/svelte.tsx
@@ -319,7 +319,7 @@ const docs: Docs = {
customMetricsOnboarding: getJSMetricsOnboarding({getInstallConfig}),
crashReportOnboarding,
profilingOnboarding,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/javascript/sveltekit.spec.tsx b/static/app/gettingStartedDocs/javascript/sveltekit.spec.tsx
index 8157e2dead41ca..beac102e904b39 100644
--- a/static/app/gettingStartedDocs/javascript/sveltekit.spec.tsx
+++ b/static/app/gettingStartedDocs/javascript/sveltekit.spec.tsx
@@ -31,7 +31,7 @@ describe('javascript-sveltekit onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/sentry-example-page/))
+ screen.getByText(textWithMarkupMatcher(/sentry-example-page/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/javascript/sveltekit.tsx b/static/app/gettingStartedDocs/javascript/sveltekit.tsx
index 29e364a3baf97c..2a3afb29e869bc 100644
--- a/static/app/gettingStartedDocs/javascript/sveltekit.tsx
+++ b/static/app/gettingStartedDocs/javascript/sveltekit.tsx
@@ -256,7 +256,7 @@ const docs: Docs = {
replayOnboarding,
customMetricsOnboarding: getJSMetricsOnboarding({getInstallConfig}),
crashReportOnboarding,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/javascript/vue.spec.tsx b/static/app/gettingStartedDocs/javascript/vue.spec.tsx
index 7c9d64c3f33300..7f2dbe3fd74f39 100644
--- a/static/app/gettingStartedDocs/javascript/vue.spec.tsx
+++ b/static/app/gettingStartedDocs/javascript/vue.spec.tsx
@@ -32,13 +32,13 @@ describe('javascript-vue onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/tracesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/tracesSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/replaysSessionSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/replaysSessionSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/))
).toBeInTheDocument();
});
diff --git a/static/app/gettingStartedDocs/javascript/vue.tsx b/static/app/gettingStartedDocs/javascript/vue.tsx
index 691b1b5b8788a1..b74669bc886999 100644
--- a/static/app/gettingStartedDocs/javascript/vue.tsx
+++ b/static/app/gettingStartedDocs/javascript/vue.tsx
@@ -362,7 +362,7 @@ const docs: Docs = {
customMetricsOnboarding: getJSMetricsOnboarding({getInstallConfig}),
crashReportOnboarding,
profilingOnboarding,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/node/awslambda.spec.tsx b/static/app/gettingStartedDocs/node/awslambda.spec.tsx
index cd3213c07825de..18646055bf6cff 100644
--- a/static/app/gettingStartedDocs/node/awslambda.spec.tsx
+++ b/static/app/gettingStartedDocs/node/awslambda.spec.tsx
@@ -37,10 +37,10 @@ describe('awslambda onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/tracesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/tracesSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/profilesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/profilesSampleRate/))
).toBeInTheDocument();
});
@@ -102,10 +102,10 @@ describe('awslambda onboarding docs', function () {
// Should have start and stop profiling calls
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/node/azurefunctions.spec.tsx b/static/app/gettingStartedDocs/node/azurefunctions.spec.tsx
index 070ce8cdacf8b1..0be8c2479b1342 100644
--- a/static/app/gettingStartedDocs/node/azurefunctions.spec.tsx
+++ b/static/app/gettingStartedDocs/node/azurefunctions.spec.tsx
@@ -36,10 +36,10 @@ describe('express onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/tracesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/tracesSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/profilesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/profilesSampleRate/))
).toBeInTheDocument();
});
@@ -101,10 +101,10 @@ describe('express onboarding docs', function () {
// Should have start and stop profiling calls
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/node/connect.spec.tsx b/static/app/gettingStartedDocs/node/connect.spec.tsx
index 76148142544a28..bceffca3f90f27 100644
--- a/static/app/gettingStartedDocs/node/connect.spec.tsx
+++ b/static/app/gettingStartedDocs/node/connect.spec.tsx
@@ -44,10 +44,10 @@ describe('connect onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/tracesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/tracesSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/profilesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/profilesSampleRate/))
).toBeInTheDocument();
});
@@ -109,10 +109,10 @@ describe('connect onboarding docs', function () {
// Should have start and stop profiling calls
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/node/express.spec.tsx b/static/app/gettingStartedDocs/node/express.spec.tsx
index 5eefc56c1a2d3d..416d6a0fbf7ce9 100644
--- a/static/app/gettingStartedDocs/node/express.spec.tsx
+++ b/static/app/gettingStartedDocs/node/express.spec.tsx
@@ -45,10 +45,10 @@ describe('express onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/tracesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/tracesSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/profilesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/profilesSampleRate/))
).toBeInTheDocument();
});
@@ -109,10 +109,10 @@ describe('express onboarding docs', function () {
// Should have start and stop profiling calls
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/node/express.tsx b/static/app/gettingStartedDocs/node/express.tsx
index e3a9c10cf926d3..1b6e474ccb868a 100644
--- a/static/app/gettingStartedDocs/node/express.tsx
+++ b/static/app/gettingStartedDocs/node/express.tsx
@@ -12,7 +12,10 @@ import {
getCrashReportModalIntroduction,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
import {getJSServerMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
import {
getImportInstrumentSnippet,
@@ -156,6 +159,7 @@ const docs: Docs = {
replayOnboardingJsLoader,
customMetricsOnboarding: getJSServerMetricsOnboarding(),
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/node/fastify.spec.tsx b/static/app/gettingStartedDocs/node/fastify.spec.tsx
index ed1e4fac370c44..79e6cfb25ddd75 100644
--- a/static/app/gettingStartedDocs/node/fastify.spec.tsx
+++ b/static/app/gettingStartedDocs/node/fastify.spec.tsx
@@ -45,10 +45,10 @@ describe('fastify onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/tracesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/tracesSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/profilesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/profilesSampleRate/))
).toBeInTheDocument();
});
@@ -110,10 +110,10 @@ describe('fastify onboarding docs', function () {
// Should have start and stop profiling calls
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/node/fastify.tsx b/static/app/gettingStartedDocs/node/fastify.tsx
index 088042b1e0ebb9..5bc407a89aa40a 100644
--- a/static/app/gettingStartedDocs/node/fastify.tsx
+++ b/static/app/gettingStartedDocs/node/fastify.tsx
@@ -12,7 +12,10 @@ import {
getCrashReportModalIntroduction,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
import {getJSServerMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
import {
getImportInstrumentSnippet,
@@ -142,6 +145,7 @@ const docs: Docs = {
replayOnboardingJsLoader,
customMetricsOnboarding: getJSServerMetricsOnboarding(),
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/node/gcpfunctions.spec.tsx b/static/app/gettingStartedDocs/node/gcpfunctions.spec.tsx
index 4febd7e1acd019..29d76c707185f5 100644
--- a/static/app/gettingStartedDocs/node/gcpfunctions.spec.tsx
+++ b/static/app/gettingStartedDocs/node/gcpfunctions.spec.tsx
@@ -39,10 +39,10 @@ describe('gcpfunctions onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/tracesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/tracesSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/profilesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/profilesSampleRate/))
).toBeInTheDocument();
});
@@ -104,10 +104,10 @@ describe('gcpfunctions onboarding docs', function () {
// Should have start and stop profiling calls
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/node/hapi.spec.tsx b/static/app/gettingStartedDocs/node/hapi.spec.tsx
index a5cfae4a703f1c..e85b6c1456cf12 100644
--- a/static/app/gettingStartedDocs/node/hapi.spec.tsx
+++ b/static/app/gettingStartedDocs/node/hapi.spec.tsx
@@ -45,10 +45,10 @@ describe('hapi onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/tracesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/tracesSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/profilesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/profilesSampleRate/))
).toBeInTheDocument();
});
@@ -110,10 +110,10 @@ describe('hapi onboarding docs', function () {
// Should have start and stop profiling calls
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/node/koa.spec.tsx b/static/app/gettingStartedDocs/node/koa.spec.tsx
index 1c85e31199882d..500bc40dfb973e 100644
--- a/static/app/gettingStartedDocs/node/koa.spec.tsx
+++ b/static/app/gettingStartedDocs/node/koa.spec.tsx
@@ -45,10 +45,10 @@ describe('koa onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/tracesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/tracesSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/profilesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/profilesSampleRate/))
).toBeInTheDocument();
});
@@ -110,10 +110,10 @@ describe('koa onboarding docs', function () {
// Should have start and stop profiling calls
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/node/nestjs.spec.tsx b/static/app/gettingStartedDocs/node/nestjs.spec.tsx
index 3dc29fce435acb..a703a91462068b 100644
--- a/static/app/gettingStartedDocs/node/nestjs.spec.tsx
+++ b/static/app/gettingStartedDocs/node/nestjs.spec.tsx
@@ -45,10 +45,10 @@ describe('Nest.js onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/tracesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/tracesSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/profilesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/profilesSampleRate/))
).toBeInTheDocument();
});
@@ -110,10 +110,10 @@ describe('Nest.js onboarding docs', function () {
// Should have start and stop profiling calls
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/node/node.spec.tsx b/static/app/gettingStartedDocs/node/node.spec.tsx
index 74b553a342365b..0193ae6224a1c3 100644
--- a/static/app/gettingStartedDocs/node/node.spec.tsx
+++ b/static/app/gettingStartedDocs/node/node.spec.tsx
@@ -37,10 +37,10 @@ describe('node onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/tracesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/tracesSampleRate/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/profilesSampleRate/))
+ screen.getByText(textWithMarkupMatcher(/profilesSampleRate/))
).toBeInTheDocument();
});
@@ -102,10 +102,10 @@ describe('node onboarding docs', function () {
// Should have start and stop profiling calls
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.startProfiler/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
+ screen.getByText(textWithMarkupMatcher(/Sentry.profiler.stopProfiler/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/node/node.tsx b/static/app/gettingStartedDocs/node/node.tsx
index 63f6538f2f35e8..3a0d77609d97a2 100644
--- a/static/app/gettingStartedDocs/node/node.tsx
+++ b/static/app/gettingStartedDocs/node/node.tsx
@@ -12,7 +12,10 @@ import {
getCrashReportModalIntroduction,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
import {getJSServerMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
import {
getImportInstrumentSnippet,
@@ -252,6 +255,7 @@ const docs: Docs = {
performanceOnboarding,
crashReportOnboarding,
profilingOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/php/laravel.tsx b/static/app/gettingStartedDocs/php/laravel.tsx
index 2896cbf29538c7..609eb5303e3ee9 100644
--- a/static/app/gettingStartedDocs/php/laravel.tsx
+++ b/static/app/gettingStartedDocs/php/laravel.tsx
@@ -13,7 +13,10 @@ import {
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
import exampleSnippets from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsExampleSnippets';
import {metricTagsExplanation} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
type Params = DocsParams;
@@ -368,6 +371,7 @@ const docs: Docs = {
replayOnboardingJsLoader,
customMetricsOnboarding,
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/php/php.tsx b/static/app/gettingStartedDocs/php/php.tsx
index a4ebd8dd35cf46..7e8195dffe1b4c 100644
--- a/static/app/gettingStartedDocs/php/php.tsx
+++ b/static/app/gettingStartedDocs/php/php.tsx
@@ -13,7 +13,10 @@ import {
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
import exampleSnippets from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsExampleSnippets';
import {metricTagsExplanation} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
type Params = DocsParams;
@@ -332,6 +335,7 @@ const docs: Docs = {
customMetricsOnboarding,
performanceOnboarding,
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/php/symfony.tsx b/static/app/gettingStartedDocs/php/symfony.tsx
index 084e7703a3c774..f9f4c7632894b2 100644
--- a/static/app/gettingStartedDocs/php/symfony.tsx
+++ b/static/app/gettingStartedDocs/php/symfony.tsx
@@ -11,7 +11,10 @@ import {
getCrashReportModalIntroduction,
getCrashReportPHPInstallStep,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
type Params = DocsParams;
@@ -186,6 +189,7 @@ const docs: Docs = {
onboarding,
replayOnboardingJsLoader,
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/python/aiohttp.tsx b/static/app/gettingStartedDocs/python/aiohttp.tsx
index a64fcd7874bef4..f43ed88f14f9f4 100644
--- a/static/app/gettingStartedDocs/python/aiohttp.tsx
+++ b/static/app/gettingStartedDocs/python/aiohttp.tsx
@@ -7,7 +7,10 @@ import {
type OnboardingConfig,
} from 'sentry/components/onboarding/gettingStartedDoc/types';
import {getPythonMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {
AlternativeConfiguration,
crashReportOnboardingPython,
@@ -168,7 +171,8 @@ const docs: Docs = {
installSnippet: getInstallSnippet(),
}),
crashReportOnboarding: crashReportOnboardingPython,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/python/asgi.spec.tsx b/static/app/gettingStartedDocs/python/asgi.spec.tsx
index b9f973ecf6c115..a400cc54150f63 100644
--- a/static/app/gettingStartedDocs/python/asgi.spec.tsx
+++ b/static/app/gettingStartedDocs/python/asgi.spec.tsx
@@ -49,7 +49,7 @@ describe('asgi onboarding docs', function () {
// Does render transaction profiling config
expect(
- screen.queryByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
+ screen.getByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
).toBeInTheDocument();
});
@@ -73,9 +73,7 @@ describe('asgi onboarding docs', function () {
// Does render continuous profiling config
expect(
- screen.queryByText(
- textWithMarkupMatcher(/"continuous_profiling_auto_start": True,/)
- )
+ screen.getByText(textWithMarkupMatcher(/"continuous_profiling_auto_start": True,/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/python/awslambda.spec.tsx b/static/app/gettingStartedDocs/python/awslambda.spec.tsx
index e381a95c32da44..f1469a835fe0fa 100644
--- a/static/app/gettingStartedDocs/python/awslambda.spec.tsx
+++ b/static/app/gettingStartedDocs/python/awslambda.spec.tsx
@@ -49,7 +49,7 @@ describe('awslambda onboarding docs', function () {
// Does render transaction profiling config
expect(
- screen.queryByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
+ screen.getByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
).toBeInTheDocument();
});
@@ -73,9 +73,7 @@ describe('awslambda onboarding docs', function () {
// Does render continuous profiling config
expect(
- screen.queryByText(
- textWithMarkupMatcher(/"continuous_profiling_auto_start": True,/)
- )
+ screen.getByText(textWithMarkupMatcher(/"continuous_profiling_auto_start": True,/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/python/bottle.tsx b/static/app/gettingStartedDocs/python/bottle.tsx
index 3d9e0eefd04a27..a12dadd52a9375 100644
--- a/static/app/gettingStartedDocs/python/bottle.tsx
+++ b/static/app/gettingStartedDocs/python/bottle.tsx
@@ -7,7 +7,10 @@ import {
type OnboardingConfig,
} from 'sentry/components/onboarding/gettingStartedDoc/types';
import {getPythonMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {
AlternativeConfiguration,
crashReportOnboardingPython,
@@ -160,7 +163,8 @@ const docs: Docs = {
installSnippet: getInstallSnippet(),
}),
crashReportOnboarding: crashReportOnboardingPython,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/python/celery.spec.tsx b/static/app/gettingStartedDocs/python/celery.spec.tsx
index 506167760bd529..5299482fc68893 100644
--- a/static/app/gettingStartedDocs/python/celery.spec.tsx
+++ b/static/app/gettingStartedDocs/python/celery.spec.tsx
@@ -54,7 +54,7 @@ describe('celery onboarding docs', function () {
// Does render transaction profiling config
expect(
- screen.queryByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
+ screen.getByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
).toBeInTheDocument();
});
@@ -78,10 +78,10 @@ describe('celery onboarding docs', function () {
// Does render continuous profiling config
expect(
- screen.queryByText(textWithMarkupMatcher(/sentry_sdk.profiler.start_profiler\(\)/))
+ screen.getByText(textWithMarkupMatcher(/sentry_sdk.profiler.start_profiler\(\)/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/sentry_sdk.profiler.stop_profiler\(\)/))
+ screen.getByText(textWithMarkupMatcher(/sentry_sdk.profiler.stop_profiler\(\)/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/python/chalice.spec.tsx b/static/app/gettingStartedDocs/python/chalice.spec.tsx
index 75595805d4f1be..0d76a5b7eebf0f 100644
--- a/static/app/gettingStartedDocs/python/chalice.spec.tsx
+++ b/static/app/gettingStartedDocs/python/chalice.spec.tsx
@@ -51,7 +51,7 @@ describe('chalice onboarding docs', function () {
// Does render transaction profiling config
expect(
- screen.queryByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
+ screen.getByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
).toBeInTheDocument();
});
@@ -75,9 +75,7 @@ describe('chalice onboarding docs', function () {
// Does render continuous profiling config
expect(
- screen.queryByText(
- textWithMarkupMatcher(/"continuous_profiling_auto_start": True,/)
- )
+ screen.getByText(textWithMarkupMatcher(/"continuous_profiling_auto_start": True,/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/python/django.spec.tsx b/static/app/gettingStartedDocs/python/django.spec.tsx
index 52358361c64fa6..f752e23a64bf8c 100644
--- a/static/app/gettingStartedDocs/python/django.spec.tsx
+++ b/static/app/gettingStartedDocs/python/django.spec.tsx
@@ -51,7 +51,7 @@ describe('django onboarding docs', function () {
// Does render transaction profiling config
expect(
- screen.queryByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
+ screen.getByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
).toBeInTheDocument();
});
@@ -75,9 +75,7 @@ describe('django onboarding docs', function () {
// Does render continuous profiling config
expect(
- screen.queryByText(
- textWithMarkupMatcher(/"continuous_profiling_auto_start": True,/)
- )
+ screen.getByText(textWithMarkupMatcher(/"continuous_profiling_auto_start": True,/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/python/django.tsx b/static/app/gettingStartedDocs/python/django.tsx
index fd17c37fd4bcf2..89f1bd4e03deb5 100644
--- a/static/app/gettingStartedDocs/python/django.tsx
+++ b/static/app/gettingStartedDocs/python/django.tsx
@@ -7,7 +7,10 @@ import {
type OnboardingConfig,
} from 'sentry/components/onboarding/gettingStartedDoc/types';
import {getPythonMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {
AlternativeConfiguration,
crashReportOnboardingPython,
@@ -230,7 +233,8 @@ const docs: Docs = {
}),
performanceOnboarding,
crashReportOnboarding: crashReportOnboardingPython,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/python/falcon.tsx b/static/app/gettingStartedDocs/python/falcon.tsx
index 8e83c85db10bf5..05efcbb651307b 100644
--- a/static/app/gettingStartedDocs/python/falcon.tsx
+++ b/static/app/gettingStartedDocs/python/falcon.tsx
@@ -7,7 +7,10 @@ import {
type OnboardingConfig,
} from 'sentry/components/onboarding/gettingStartedDoc/types';
import {getPythonMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {
AlternativeConfiguration,
crashReportOnboardingPython,
@@ -160,7 +163,8 @@ const docs: Docs = {
installSnippet: getInstallSnippet(),
}),
crashReportOnboarding: crashReportOnboardingPython,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/python/fastapi.spec.tsx b/static/app/gettingStartedDocs/python/fastapi.spec.tsx
index 558c9e87fc9e9a..fc31c125e87495 100644
--- a/static/app/gettingStartedDocs/python/fastapi.spec.tsx
+++ b/static/app/gettingStartedDocs/python/fastapi.spec.tsx
@@ -51,7 +51,7 @@ describe('flask onboarding docs', function () {
// Does render transaction profiling config
expect(
- screen.queryByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
+ screen.getByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
).toBeInTheDocument();
});
@@ -75,9 +75,7 @@ describe('flask onboarding docs', function () {
// Does render continuous profiling config
expect(
- screen.queryByText(
- textWithMarkupMatcher(/"continuous_profiling_auto_start": True,/)
- )
+ screen.getByText(textWithMarkupMatcher(/"continuous_profiling_auto_start": True,/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/python/fastapi.tsx b/static/app/gettingStartedDocs/python/fastapi.tsx
index 6097b8ab64781e..6fe895e6b88a02 100644
--- a/static/app/gettingStartedDocs/python/fastapi.tsx
+++ b/static/app/gettingStartedDocs/python/fastapi.tsx
@@ -9,7 +9,10 @@ import {
type OnboardingConfig,
} from 'sentry/components/onboarding/gettingStartedDoc/types';
import {getPythonMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {
AlternativeConfiguration,
crashReportOnboardingPython,
@@ -170,7 +173,8 @@ const docs: Docs = {
installSnippet: getInstallSnippet(),
}),
crashReportOnboarding: crashReportOnboardingPython,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/python/flask.tsx b/static/app/gettingStartedDocs/python/flask.tsx
index 45165f5adf5b8f..85424ad7db2263 100644
--- a/static/app/gettingStartedDocs/python/flask.tsx
+++ b/static/app/gettingStartedDocs/python/flask.tsx
@@ -9,7 +9,10 @@ import {
type OnboardingConfig,
} from 'sentry/components/onboarding/gettingStartedDoc/types';
import {getPythonMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {
AlternativeConfiguration,
crashReportOnboardingPython,
@@ -241,7 +244,8 @@ const docs: Docs = {
}),
performanceOnboarding,
crashReportOnboarding: crashReportOnboardingPython,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/python/gcpfunctions.spec.tsx b/static/app/gettingStartedDocs/python/gcpfunctions.spec.tsx
index 92b06adaf87408..807da1c0146101 100644
--- a/static/app/gettingStartedDocs/python/gcpfunctions.spec.tsx
+++ b/static/app/gettingStartedDocs/python/gcpfunctions.spec.tsx
@@ -49,7 +49,7 @@ describe('gcpfunctions onboarding docs', function () {
// Does render transaction profiling config
expect(
- screen.queryByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
+ screen.getByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
).toBeInTheDocument();
});
@@ -73,9 +73,7 @@ describe('gcpfunctions onboarding docs', function () {
// Does render continuous profiling config
expect(
- screen.queryByText(
- textWithMarkupMatcher(/"continuous_profiling_auto_start": True,/)
- )
+ screen.getByText(textWithMarkupMatcher(/"continuous_profiling_auto_start": True,/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/python/pyramid.tsx b/static/app/gettingStartedDocs/python/pyramid.tsx
index 12a7503f3d0bd4..7e76da11a97790 100644
--- a/static/app/gettingStartedDocs/python/pyramid.tsx
+++ b/static/app/gettingStartedDocs/python/pyramid.tsx
@@ -6,7 +6,10 @@ import type {
OnboardingConfig,
} from 'sentry/components/onboarding/gettingStartedDoc/types';
import {getPythonMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {
crashReportOnboardingPython,
featureFlagOnboarding,
@@ -109,7 +112,8 @@ const docs: Docs = {
installSnippet: getInstallSnippet(),
}),
crashReportOnboarding: crashReportOnboardingPython,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/python/python.spec.tsx b/static/app/gettingStartedDocs/python/python.spec.tsx
index dad0242457604d..0306d5ae975343 100644
--- a/static/app/gettingStartedDocs/python/python.spec.tsx
+++ b/static/app/gettingStartedDocs/python/python.spec.tsx
@@ -50,7 +50,7 @@ describe('python onboarding docs', function () {
// Does render transaction profiling config
expect(
- screen.queryByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
+ screen.getByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
).toBeInTheDocument();
});
@@ -74,10 +74,10 @@ describe('python onboarding docs', function () {
// Does render continuous profiling config
expect(
- screen.queryByText(textWithMarkupMatcher(/sentry_sdk.profiler.start_profiler\(\)/))
+ screen.getByText(textWithMarkupMatcher(/sentry_sdk.profiler.start_profiler\(\)/))
).toBeInTheDocument();
expect(
- screen.queryByText(textWithMarkupMatcher(/sentry_sdk.profiler.stop_profiler\(\)/))
+ screen.getByText(textWithMarkupMatcher(/sentry_sdk.profiler.stop_profiler\(\)/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/python/quart.tsx b/static/app/gettingStartedDocs/python/quart.tsx
index 8dd51d2648eff1..d6ed372a36b80b 100644
--- a/static/app/gettingStartedDocs/python/quart.tsx
+++ b/static/app/gettingStartedDocs/python/quart.tsx
@@ -7,7 +7,10 @@ import {
type OnboardingConfig,
} from 'sentry/components/onboarding/gettingStartedDoc/types';
import {getPythonMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {
AlternativeConfiguration,
crashReportOnboardingPython,
@@ -157,7 +160,8 @@ const docs: Docs = {
installSnippet: getInstallSnippet(),
}),
crashReportOnboarding: crashReportOnboardingPython,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/python/sanic.tsx b/static/app/gettingStartedDocs/python/sanic.tsx
index 349c78cc984e8c..3c5c982c9e419c 100644
--- a/static/app/gettingStartedDocs/python/sanic.tsx
+++ b/static/app/gettingStartedDocs/python/sanic.tsx
@@ -6,7 +6,10 @@ import type {
OnboardingConfig,
} from 'sentry/components/onboarding/gettingStartedDoc/types';
import {getPythonMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {
crashReportOnboardingPython,
featureFlagOnboarding,
@@ -119,7 +122,8 @@ const docs: Docs = {
installSnippet: getInstallSnippet(),
}),
crashReportOnboarding: crashReportOnboardingPython,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/python/serverless.spec.tsx b/static/app/gettingStartedDocs/python/serverless.spec.tsx
index 6d61c5e95c54f1..d7e2f417970f11 100644
--- a/static/app/gettingStartedDocs/python/serverless.spec.tsx
+++ b/static/app/gettingStartedDocs/python/serverless.spec.tsx
@@ -49,7 +49,7 @@ describe('serverless onboarding docs', function () {
// Does render transaction profiling config
expect(
- screen.queryByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
+ screen.getByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
).toBeInTheDocument();
});
@@ -73,9 +73,7 @@ describe('serverless onboarding docs', function () {
// Does render continuous profiling config
expect(
- screen.queryByText(
- textWithMarkupMatcher(/"continuous_profiling_auto_start": True,/)
- )
+ screen.getByText(textWithMarkupMatcher(/"continuous_profiling_auto_start": True,/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/python/starlette.tsx b/static/app/gettingStartedDocs/python/starlette.tsx
index 6d274e8851e602..0c2984225462a1 100644
--- a/static/app/gettingStartedDocs/python/starlette.tsx
+++ b/static/app/gettingStartedDocs/python/starlette.tsx
@@ -7,7 +7,10 @@ import {
type OnboardingConfig,
} from 'sentry/components/onboarding/gettingStartedDoc/types';
import {getPythonMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {
AlternativeConfiguration,
crashReportOnboardingPython,
@@ -157,7 +160,8 @@ const docs: Docs = {
installSnippet: getInstallSnippet(),
}),
crashReportOnboarding: crashReportOnboardingPython,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/python/tornado.tsx b/static/app/gettingStartedDocs/python/tornado.tsx
index 77caab3e6b849f..7839aa2f4bd992 100644
--- a/static/app/gettingStartedDocs/python/tornado.tsx
+++ b/static/app/gettingStartedDocs/python/tornado.tsx
@@ -7,7 +7,10 @@ import {
type OnboardingConfig,
} from 'sentry/components/onboarding/gettingStartedDoc/types';
import {getPythonMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {
AlternativeConfiguration,
crashReportOnboardingPython,
@@ -178,7 +181,8 @@ const docs: Docs = {
installSnippet: getInstallSnippet(),
}),
crashReportOnboarding: crashReportOnboardingPython,
- featureFlagOnboarding: featureFlagOnboarding,
+ featureFlagOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/gettingStartedDocs/python/tryton.spec.tsx b/static/app/gettingStartedDocs/python/tryton.spec.tsx
index 7718e1ed085990..69f928b2d40604 100644
--- a/static/app/gettingStartedDocs/python/tryton.spec.tsx
+++ b/static/app/gettingStartedDocs/python/tryton.spec.tsx
@@ -42,7 +42,7 @@ describe('tryton onboarding docs', function () {
// Does render transaction profiling config
expect(
- screen.queryByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
+ screen.getByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
).toBeInTheDocument();
});
@@ -66,9 +66,7 @@ describe('tryton onboarding docs', function () {
// Does render continuous profiling config
expect(
- screen.queryByText(
- textWithMarkupMatcher(/"continuous_profiling_auto_start": True,/)
- )
+ screen.getByText(textWithMarkupMatcher(/"continuous_profiling_auto_start": True,/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/python/wsgi.spec.tsx b/static/app/gettingStartedDocs/python/wsgi.spec.tsx
index 4dcaf19826bd87..b7aac850132c6c 100644
--- a/static/app/gettingStartedDocs/python/wsgi.spec.tsx
+++ b/static/app/gettingStartedDocs/python/wsgi.spec.tsx
@@ -49,7 +49,7 @@ describe('wsgi onboarding docs', function () {
// Does render transaction profiling config
expect(
- screen.queryByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
+ screen.getByText(textWithMarkupMatcher(/profiles_sample_rate=1\.0,/))
).toBeInTheDocument();
});
@@ -73,9 +73,7 @@ describe('wsgi onboarding docs', function () {
// Does render continuous profiling config
expect(
- screen.queryByText(
- textWithMarkupMatcher(/"continuous_profiling_auto_start": True,/)
- )
+ screen.getByText(textWithMarkupMatcher(/"continuous_profiling_auto_start": True,/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/ruby/rails.tsx b/static/app/gettingStartedDocs/ruby/rails.tsx
index f88b218e408469..0de89fa880e56e 100644
--- a/static/app/gettingStartedDocs/ruby/rails.tsx
+++ b/static/app/gettingStartedDocs/ruby/rails.tsx
@@ -11,7 +11,10 @@ import {
getCrashReportSDKInstallFirstStepRails,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
import {getRubyMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
-import replayOnboardingJsLoader from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
+import {
+ feedbackOnboardingJsLoader,
+ replayOnboardingJsLoader,
+} from 'sentry/gettingStartedDocs/javascript/jsLoader/jsLoader';
import {t, tct} from 'sentry/locale';
type Params = DocsParams;
@@ -199,6 +202,7 @@ const docs: Docs = {
customMetricsOnboarding: getRubyMetricsOnboarding(),
replayOnboardingJsLoader,
crashReportOnboarding,
+ feedbackOnboardingJsLoader,
};
export default docs;
diff --git a/static/app/locale.spec.tsx b/static/app/locale.spec.tsx
index 24ed1da55610ad..d8b2ca2bc6a6fd 100644
--- a/static/app/locale.spec.tsx
+++ b/static/app/locale.spec.tsx
@@ -1,6 +1,7 @@
import {render, screen} from 'sentry-test/reactTestingLibrary';
import {textWithMarkupMatcher} from 'sentry-test/utils';
+// eslint-disable-next-line no-restricted-imports
import {tct} from 'sentry/locale';
describe('locale.gettextComponentTemplate', () => {
@@ -73,7 +74,7 @@ describe('locale.gettextComponentTemplate', () => {
);
expect(screen.getByText(textWithMarkupMatcher('one two three'))).toBeInTheDocument();
- expect(container.innerHTML).toEqual('one two three
');
+ expect(container.innerHTML).toBe('one two three
');
});
it('should render nested goups', () => {
@@ -89,7 +90,7 @@ describe('locale.gettextComponentTemplate', () => {
expect(
screen.getByText(textWithMarkupMatcher('text with another group'))
).toBeInTheDocument();
- expect(container.innerHTML).toEqual(
+ expect(container.innerHTML).toBe(
''
);
});
diff --git a/static/app/locale.tsx b/static/app/locale.tsx
index 566e7cfc82b34f..d41577dd297665 100644
--- a/static/app/locale.tsx
+++ b/static/app/locale.tsx
@@ -214,7 +214,7 @@ export function parseComponentTemplate(template: string): ParsedTemplate {
} else {
pos = regex.lastIndex = process(regex.lastIndex, currentGroupId, true);
}
- buf.push({group: groupName, id: currentGroupId});
+ buf.push({group: groupName!, id: currentGroupId});
}
let endPos = regex.lastIndex;
diff --git a/static/app/plugins/registry.tsx b/static/app/plugins/registry.tsx
index 52635e140063b1..554737db5aa101 100644
--- a/static/app/plugins/registry.tsx
+++ b/static/app/plugins/registry.tsx
@@ -31,7 +31,7 @@ export default class Registry {
}
}
console.info(
- '[plugins] Loaded ' + data.id + ' as {' + this.plugins[data.id].name + '}'
+ '[plugins] Loaded ' + data.id + ' as {' + this.plugins[data.id]!.name + '}'
);
callback(this.get(data));
}
diff --git a/static/app/plugins/sessionstack/contexts/sessionstack.tsx b/static/app/plugins/sessionstack/contexts/sessionstack.tsx
index 77bff3a80f6b7b..430e2ed1057662 100644
--- a/static/app/plugins/sessionstack/contexts/sessionstack.tsx
+++ b/static/app/plugins/sessionstack/contexts/sessionstack.tsx
@@ -1,5 +1,6 @@
import {Component} from 'react';
-import {findDOMNode} from 'react-dom';
+
+import {uniqueId} from 'sentry/utils/guid';
const ASPECT_RATIO = 16 / 9;
@@ -21,9 +22,6 @@ class SessionStackContextType extends Component {
};
componentDidMount() {
- // eslint-disable-next-line react/no-find-dom-node
- const domNode = findDOMNode(this) as HTMLElement;
- this.parentNode = domNode.parentNode as HTMLElement;
window.addEventListener('resize', () => this.setIframeSize(), false);
this.setIframeSize();
}
@@ -31,16 +29,17 @@ class SessionStackContextType extends Component {
componentWillUnmount() {
window.removeEventListener('resize', () => this.setIframeSize(), false);
}
- parentNode?: HTMLElement;
+ iframeContainerId = uniqueId();
getTitle = () => 'SessionStack';
setIframeSize() {
- if (this.state.showIframe || !this.parentNode) {
+ const parentNode = document.getElementById(this.iframeContainerId)?.parentElement;
+ if (!this.state.showIframe || !parentNode) {
return;
}
- const parentWidth = this.parentNode.clientWidth;
+ const parentWidth = parentNode.clientWidth;
this.setState({
width: parentWidth,
@@ -64,7 +63,7 @@ class SessionStackContextType extends Component {
}
return (
-
+
{this.state.showIframe ? (