From fa5ca5102b0a7c0554840e0e571d1896484cf1b6 Mon Sep 17 00:00:00 2001 From: Mark Story Date: Fri, 9 Aug 2019 14:06:38 -0400 Subject: [PATCH 1/3] feat(events2) Add metadata to responses. Add metadata to responses to allow the API to convey data types to the client. Refs SEN-908 --- .../api/endpoints/organization_events.py | 30 ++++-- .../discover/endpoints/discover_query.py | 31 +----- src/sentry/utils/snuba.py | 30 ++++++ tests/sentry/utils/test_snuba.py | 15 ++- .../endpoints/test_organization_events_v2.py | 98 +++++++++++-------- 5 files changed, 124 insertions(+), 80 deletions(-) diff --git a/src/sentry/api/endpoints/organization_events.py b/src/sentry/api/endpoints/organization_events.py index 7cd3e3a1d68cf2..0a4c13e1066ed7 100644 --- a/src/sentry/api/endpoints/organization_events.py +++ b/src/sentry/api/endpoints/organization_events.py @@ -10,11 +10,7 @@ from sentry.api.paginator import GenericOffsetPaginator from sentry.api.serializers import EventSerializer, serialize, SimpleEventSerializer from sentry.models import SnubaEvent -from sentry.utils.snuba import ( - raw_query, - SnubaError, - transform_aliases_and_query, -) +from sentry.utils import snuba from sentry import features from sentry.models.project import Project @@ -49,8 +45,7 @@ def get(self, request, organization): }, status=400) data_fn = partial( - lambda **kwargs: transform_aliases_and_query( - skip_conditions=True, **kwargs)['data'], + lambda **kwargs: snuba.transform_aliases_and_query(skip_conditions=True, **kwargs), referrer='api.organization-events-v2', **snuba_args ) @@ -59,10 +54,10 @@ def get(self, request, organization): return self.paginate( request=request, paginator=GenericOffsetPaginator(data_fn=data_fn), - on_results=lambda results: self.handle_results( + on_results=lambda results: self.handle_results_with_meta( request, organization, params['project_id'], results), ) - except SnubaError as error: + except snuba.SnubaError as error: logger.info( 'organization.events.snuba-error', extra={ @@ -105,7 +100,7 @@ def get_legacy(self, request, organization): snuba_cols = SnubaEvent.minimal_columns if full else SnubaEvent.selected_columns data_fn = partial( # extract 'data' from raw_query result - lambda *args, **kwargs: raw_query(*args, **kwargs)['data'], + lambda *args, **kwargs: snuba.raw_query(*args, **kwargs)['data'], selected_columns=snuba_cols, orderby='-timestamp', referrer='api.organization-events', @@ -120,6 +115,21 @@ def get_legacy(self, request, organization): paginator=GenericOffsetPaginator(data_fn=data_fn) ) + def handle_results_with_meta(self, request, organization, project_ids, results): + data = self.handle_results(request, organization, project_ids, results.get('data')) + if not data: + return {'data': [], 'meta': {}} + + meta = {value['name']: snuba.get_json_type(value['type']) for value in results['meta']} + # Ensure all columns in the result have types. 
+ for key in data[0]: + if key not in meta: + meta[key] = 'string' + return { + 'meta': meta, + 'data': data, + } + def handle_results(self, request, organization, project_ids, results): if not results: return results diff --git a/src/sentry/discover/endpoints/discover_query.py b/src/sentry/discover/endpoints/discover_query.py index db475f936624e9..ee55494fd03b90 100644 --- a/src/sentry/discover/endpoints/discover_query.py +++ b/src/sentry/discover/endpoints/discover_query.py @@ -1,6 +1,5 @@ from __future__ import absolute_import -import re from functools import partial from copy import deepcopy @@ -25,34 +24,6 @@ class DiscoverQueryPermission(OrganizationPermission): class DiscoverQueryEndpoint(OrganizationEndpoint): permission_classes = (DiscoverQueryPermission, ) - def get_json_type(self, snuba_type): - """ - Convert Snuba/Clickhouse type to JSON type - Default is string - """ - - # Ignore Nullable part - nullable_match = re.search(r'^Nullable\((.+)\)$', snuba_type) - - if nullable_match: - snuba_type = nullable_match.group(1) - # Check for array - - array_match = re.search(r'^Array\(.+\)$', snuba_type) - if array_match: - return 'array' - - types = { - 'UInt8': 'boolean', - 'UInt16': 'integer', - 'UInt32': 'integer', - 'UInt64': 'integer', - 'Float32': 'number', - 'Float64': 'number', - } - - return types.get(snuba_type, 'string') - def handle_results(self, snuba_results, requested_query, projects): if 'project.name' in requested_query['selected_columns']: project_name_index = requested_query['selected_columns'].index('project.name') @@ -88,7 +59,7 @@ def handle_results(self, snuba_results, requested_query, projects): # Convert snuba types to json types for col in snuba_results['meta']: - col['type'] = self.get_json_type(col.get('type')) + col['type'] = snuba.get_json_type(col.get('type')) return snuba_results diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py index cdc97aae9428e6..ed957f0799d229 100644 --- a/src/sentry/utils/snuba.py +++ b/src/sentry/utils/snuba.py @@ -786,6 +786,36 @@ def nest_groups(data, groups, aggregate_cols): ) +JSON_TYPE_MAP = { + 'UInt8': 'boolean', + 'UInt16': 'integer', + 'UInt32': 'integer', + 'UInt64': 'integer', + 'Float32': 'number', + 'Float64': 'number', + 'DateTime': 'date', +} + + +def get_json_type(snuba_type): + """ + Convert Snuba/Clickhouse type to JSON type + Default is string + """ + # Ignore Nullable part + nullable_match = re.search(r'^Nullable\((.+)\)$', snuba_type) + + if nullable_match: + snuba_type = nullable_match.group(1) + + # Check for array + array_match = re.search(r'^Array\(.+\)$', snuba_type) + if array_match: + return 'array' + + return JSON_TYPE_MAP.get(snuba_type, 'string') + + # The following are functions for resolving information from sentry models # about projects, environments, and issues (groups). 
Having this snuba # implementation have to know about these relationships is not ideal, and diff --git a/tests/sentry/utils/test_snuba.py b/tests/sentry/utils/test_snuba.py index 9b6eac79c23672..c88b759b04aed5 100644 --- a/tests/sentry/utils/test_snuba.py +++ b/tests/sentry/utils/test_snuba.py @@ -5,7 +5,7 @@ from sentry.models import GroupRelease, Release from sentry.testutils import TestCase -from sentry.utils.snuba import get_snuba_translators, zerofill +from sentry.utils.snuba import get_snuba_translators, zerofill, get_json_type class SnubaUtilsTest(TestCase): @@ -183,3 +183,16 @@ def test_zerofill(self): assert results[0]['time'] == 1546387200 assert results[7]['time'] == 1546992000 + + def test_get_json_type(self): + assert get_json_type('UInt8') == 'boolean' + assert get_json_type('UInt16') == 'integer' + assert get_json_type('UInt32') == 'integer' + assert get_json_type('UInt64') == 'integer' + assert get_json_type('Float32') == 'number' + assert get_json_type('Float64') == 'number' + assert get_json_type('Nullable(Float64)') == 'number' + assert get_json_type('Array(String)') == 'array' + assert get_json_type('Char') == 'string' + assert get_json_type('unknown') == 'string' + assert get_json_type('') == 'string' diff --git a/tests/snuba/api/endpoints/test_organization_events_v2.py b/tests/snuba/api/endpoints/test_organization_events_v2.py index dfedbef93eb5e4..d0034f3911436b 100644 --- a/tests/snuba/api/endpoints/test_organization_events_v2.py +++ b/tests/snuba/api/endpoints/test_organization_events_v2.py @@ -126,10 +126,17 @@ def test_raw_data(self): ) assert response.status_code == 200, response.content - assert len(response.data) == 2 - assert response.data[0]['id'] == 'b' * 32 - assert response.data[0]['project.id'] == project.id - assert response.data[0]['user.email'] == 'foo@example.com' + data = response.data['data'] + assert len(data) == 2 + assert data[0]['id'] == 'b' * 32 + assert data[0]['project.id'] == project.id + assert data[0]['user.email'] == 'foo@example.com' + meta = response.data['meta'] + assert meta['id'] == 'string' + assert meta['project.name'] == 'string' + assert meta['user.email'] == 'string' + assert meta['user.ip'] == 'string' + assert meta['timestamp'] == 'date' def test_project_name(self): self.login_as(user=self.user) @@ -153,10 +160,10 @@ def test_project_name(self): ) assert response.status_code == 200, response.content - assert len(response.data) == 1 - assert response.data[0]['project.name'] == project.slug - assert 'project.id' not in response.data[0] - assert response.data[0]['environment'] == 'staging' + assert len(response.data['data']) == 1 + assert response.data['data'][0]['project.name'] == project.slug + assert 'project.id' not in response.data['data'][0] + assert response.data['data'][0]['environment'] == 'staging' def test_implicit_groupby(self): self.login_as(user=self.user) @@ -197,21 +204,24 @@ def test_implicit_groupby(self): ) assert response.status_code == 200, response.content - assert len(response.data) == 2 - assert response.data[0] == { + assert len(response.data['data']) == 2 + data = response.data['data'] + assert data[0] == { 'project.id': project.id, 'project.name': project.slug, 'issue.id': event1.group_id, 'count_id': 2, 'latest_event': event1.event_id, } - assert response.data[1] == { + assert data[1] == { 'project.id': project.id, 'project.name': project.slug, 'issue.id': event2.group_id, 'count_id': 1, 'latest_event': event2.event_id, } + meta = response.data['meta'] + assert meta['count_id'] == 'integer' def 
test_automatic_id_and_project(self): self.login_as(user=self.user) @@ -243,12 +253,17 @@ def test_automatic_id_and_project(self): ) assert response.status_code == 200, response.content - assert len(response.data) == 1 - assert response.data[0] == { + assert len(response.data['data']) == 1 + data = response.data['data'] + assert data[0] == { 'project.name': project.slug, 'count_id': 2, 'latest_event': event.event_id, } + meta = response.data['meta'] + assert meta['count_id'] == 'integer' + assert meta['project.name'] == 'string' + assert meta['latest_event'] == 'string' def test_orderby(self): self.login_as(user=self.user) @@ -285,9 +300,10 @@ def test_orderby(self): ) assert response.status_code == 200, response.content - assert response.data[0]['id'] == 'c' * 32 - assert response.data[1]['id'] == 'b' * 32 - assert response.data[2]['id'] == 'a' * 32 + data = response.data['data'] + assert data[0]['id'] == 'c' * 32 + assert data[1]['id'] == 'b' * 32 + assert data[2]['id'] == 'a' * 32 def test_sort_title(self): self.login_as(user=self.user) @@ -327,9 +343,10 @@ def test_sort_title(self): ) assert response.status_code == 200, response.content - assert response.data[0]['id'] == 'c' * 32 - assert response.data[1]['id'] == 'b' * 32 - assert response.data[2]['id'] == 'a' * 32 + data = response.data['data'] + assert data[0]['id'] == 'c' * 32 + assert data[1]['id'] == 'b' * 32 + assert data[2]['id'] == 'a' * 32 def test_sort_invalid(self): self.login_as(user=self.user) @@ -401,17 +418,18 @@ def test_aliased_fields(self): ) assert response.status_code == 200, response.content - assert len(response.data) == 2 - assert response.data[0]['issue.id'] == event1.group_id - assert response.data[0]['count_id'] == 1 - assert response.data[0]['count_unique_user'] == 1 - assert 'latest_event' in response.data[0] - assert 'project.name' in response.data[0] - assert 'projectid' not in response.data[0] - assert 'project.id' not in response.data[0] - assert response.data[1]['issue.id'] == event2.group_id - assert response.data[1]['count_id'] == 2 - assert response.data[1]['count_unique_user'] == 2 + assert len(response.data['data']) == 2 + data = response.data['data'] + assert data[0]['issue.id'] == event1.group_id + assert data[0]['count_id'] == 1 + assert data[0]['count_unique_user'] == 1 + assert 'latest_event' in data[0] + assert 'project.name' in data[0] + assert 'projectid' not in data[0] + assert 'project.id' not in data[0] + assert data[1]['issue.id'] == event2.group_id + assert data[1]['count_id'] == 2 + assert data[1]['count_unique_user'] == 2 @pytest.mark.xfail(reason='aggregate comparisons need parser improvements') def test_aggregation_comparison(self): @@ -486,10 +504,11 @@ def test_aggregation_comparison(self): assert response.status_code == 200, response.content - assert len(response.data) == 1 - assert response.data[0]['issue.id'] == event.group_id - assert response.data[0]['count_id'] == 2 - assert response.data[0]['count_unique_user'] == 2 + assert len(response.data['data']) == 1 + data = response.data['data'] + assert data[0]['issue.id'] == event.group_id + assert data[0]['count_id'] == 2 + assert data[0]['count_unique_user'] == 2 @pytest.mark.xfail(reason='aggregate comparisons need parser improvements') def test_aggregation_comparison_with_conditions(self): @@ -557,9 +576,10 @@ def test_aggregation_comparison_with_conditions(self): assert response.status_code == 200, response.content - assert len(response.data) == 1 - assert response.data[0]['issue.id'] == event.group_id - assert 
response.data[0]['count_id'] == 2 + assert len(response.data['data']) == 1 + data = response.data['data'] + assert data[0]['issue.id'] == event.group_id + assert data[0]['count_id'] == 2 def test_nonexistent_fields(self): self.login_as(user=self.user) @@ -583,7 +603,7 @@ def test_nonexistent_fields(self): }, ) assert response.status_code == 200, response.content - assert response.data[0]['issue_world.id'] == '' + assert response.data['data'][0]['issue_world.id'] == '' def test_no_requested_fields_or_grouping(self): self.login_as(user=self.user) @@ -636,7 +656,7 @@ def test_condition_on_aggregate_fails(self): ) assert response.status_code == 200, response.content - assert len(response.data) == 0 + assert len(response.data['data']) == 0 def test_group_filtering(self): user = self.create_user() From d951a9f10d7949da579dc8a4c67d96b10c51d9e2 Mon Sep 17 00:00:00 2001 From: Mark Story Date: Fri, 9 Aug 2019 15:56:53 -0400 Subject: [PATCH 2/3] Update UI to use new API data and generic column formatting. --- .../app/views/organizationEventsV2/data.jsx | 98 +++++++++++++------ .../organizationEventsV2/relatedEvents.jsx | 6 +- .../app/views/organizationEventsV2/table.jsx | 34 +++---- .../app/views/organizationEventsV2/utils.jsx | 24 ++++- .../eventDetails.spec.jsx | 22 +++-- .../views/organizationEventsV2/index.spec.jsx | 22 +++-- 6 files changed, 133 insertions(+), 73 deletions(-) diff --git a/src/sentry/static/sentry/app/views/organizationEventsV2/data.jsx b/src/sentry/static/sentry/app/views/organizationEventsV2/data.jsx index f2c023c092085c..4a13abd8c9e80a 100644 --- a/src/sentry/static/sentry/app/views/organizationEventsV2/data.jsx +++ b/src/sentry/static/sentry/app/views/organizationEventsV2/data.jsx @@ -89,6 +89,72 @@ export const ALL_VIEWS = deepFreeze([ }, ]); +/** + * A mapping of field types to their rendering function. + * This mapping is used when a field is not defined in SPECIAL_FIELDS + * This mapping should match the output sentry.utils.snuba:get_json_type + */ +export const FIELD_FORMATTERS = { + boolean: { + sortField: true, + renderFunc: (field, data, {organization, location}) => { + const target = { + pathname: `/organizations/${organization.slug}/events/`, + query: { + ...location.query, + query: `${field}:${data[field]}`, + }, + }; + const value = data[field] ? t('yes') : t('no'); + return {value}; + }, + }, + integer: { + sortField: true, + renderFunc: (field, data) => ( + + {typeof data[field] === 'number' ? : null} + + ), + }, + number: { + sortField: true, + renderFunc: (field, data) => ( + + {typeof data[field] === 'number' ? : null} + + ), + }, + date: { + sortField: true, + renderFunc: (field, data) => ( + + {data[field] ? ( + getDynamicText({ + value: , + fixed: 'timestamp', + }) + ) : ( + t('n/a') + )} + + ), + }, + string: { + sortField: false, + renderFunc: (field, data, {organization, location}) => { + const target = { + pathname: `/organizations/${organization.slug}/events/`, + query: { + ...location.query, + query: `${field}:${data[field]}`, + }, + }; + return {data[field]}; + }, + }, +}; + /** * "Special fields" do not map 1:1 to an single column in the event database, * they are a UI concept that combines the results of multiple fields and @@ -189,19 +255,6 @@ export const SPECIAL_FIELDS = { return {badge}; }, }, - timestamp: { - sortField: 'timestamp', - renderFunc: data => ( - - {data.timestamp - ? 
getDynamicText({ - value: , - fixed: 'time', - }) - : null} - - ), - }, issue_title: { sortField: 'issue_title', renderFunc: (data, {organization, location}) => { @@ -221,25 +274,6 @@ export const SPECIAL_FIELDS = { ); }, }, - // TODO generalize this. - 'count(id)': { - sortField: 'count_id', - renderFunc: data => ( - - {typeof data.count_id === 'number' ? : null} - - ), - }, - 'count_unique(user)': { - sortField: 'unique_count_user', - renderFunc: data => ( - - {typeof data.count_unique_user === 'number' ? ( - - ) : null} - - ), - }, last_seen: { sortField: 'last_seen', renderFunc: data => { diff --git a/src/sentry/static/sentry/app/views/organizationEventsV2/relatedEvents.jsx b/src/sentry/static/sentry/app/views/organizationEventsV2/relatedEvents.jsx index dedb7914e0bfc4..6310aadcec4aae 100644 --- a/src/sentry/static/sentry/app/views/organizationEventsV2/relatedEvents.jsx +++ b/src/sentry/static/sentry/app/views/organizationEventsV2/relatedEvents.jsx @@ -62,7 +62,7 @@ class RelatedEvents extends AsyncComponent { renderBody() { const {location, projects, event} = this.props; const {events} = this.state; - if (!events) { + if (!events || !events.data) { return null; } @@ -71,10 +71,10 @@ class RelatedEvents extends AsyncComponent { <InlineSvg src="icon-link" size="12px" /> {t('Related Events')} - {events.length < 1 ? ( + {events.data.length < 1 ? ( {t('No related events found.')} ) : ( - events.map(item => { + events.data.map(item => { const eventUrl = { pathname: location.pathname, query: { diff --git a/src/sentry/static/sentry/app/views/organizationEventsV2/table.jsx b/src/sentry/static/sentry/app/views/organizationEventsV2/table.jsx index 45b0e4b538a028..35b978c71418c1 100644 --- a/src/sentry/static/sentry/app/views/organizationEventsV2/table.jsx +++ b/src/sentry/static/sentry/app/views/organizationEventsV2/table.jsx @@ -10,26 +10,27 @@ import LoadingContainer from 'app/components/loading/loadingContainer'; import {t} from 'app/locale'; import space from 'app/styles/space'; -import {SPECIAL_FIELDS} from './data'; -import {QueryLink} from './styles'; +import {FIELD_FORMATTERS, SPECIAL_FIELDS} from './data'; +import {getFieldRenderer} from './utils'; import SortLink from './sortLink'; export default class Table extends React.Component { static propTypes = { view: SentryTypes.EventView.isRequired, - data: PropTypes.arrayOf(PropTypes.object), + data: PropTypes.object, isLoading: PropTypes.bool.isRequired, organization: SentryTypes.Organization.isRequired, location: PropTypes.object, }; renderBody() { - const {view, data, organization, location, isLoading} = this.props; - const {fields} = view.data; + const {view, organization, location, isLoading} = this.props; - if (!data || isLoading) { + if (!this.props.data || isLoading) { return null; } + const {fields} = view.data; + const {data, meta} = this.props.data; if (data.length === 0) { return ( @@ -42,22 +43,8 @@ export default class Table extends React.Component { return data.map((row, idx) => ( {fields.map(field => { - const target = { - pathname: `/organizations/${organization.slug}/events/`, - query: { - ...location.query, - query: `${field}:${row[field]}`, - }, - }; - return ( - - {SPECIAL_FIELDS.hasOwnProperty(field) ? 
( - SPECIAL_FIELDS[field].renderFunc(row, {organization, location}) - ) : ( - {row[field]} - )} - - ); + const fieldRenderer = getFieldRenderer(field, meta); + return {fieldRenderer(row, {organization, location})}; })} )); @@ -73,9 +60,12 @@ export default class Table extends React.Component { {fields.map((field, i) => { const title = columnNames[i] || field; + let sortKey = field; if (SPECIAL_FIELDS.hasOwnProperty(field)) { sortKey = SPECIAL_FIELDS[field].sortField; + } else if (FIELD_FORMATTERS.hasOwnProperty(field)) { + sortKey = FIELD_FORMATTERS[field].sortField ? field : false; } if (sortKey === false) { diff --git a/src/sentry/static/sentry/app/views/organizationEventsV2/utils.jsx b/src/sentry/static/sentry/app/views/organizationEventsV2/utils.jsx index f4f880453541b0..fcf8efa66b9a8b 100644 --- a/src/sentry/static/sentry/app/views/organizationEventsV2/utils.jsx +++ b/src/sentry/static/sentry/app/views/organizationEventsV2/utils.jsx @@ -1,8 +1,8 @@ -import {pick, get} from 'lodash'; +import {partial, pick, get} from 'lodash'; import {DEFAULT_PER_PAGE} from 'app/constants'; import {URL_PARAM} from 'app/constants/globalSelectionHeader'; -import {ALL_VIEWS} from './data'; +import {ALL_VIEWS, SPECIAL_FIELDS, FIELD_FORMATTERS} from './data'; /** * Given a view id, return the corresponding view object @@ -134,3 +134,23 @@ export function fetchTotalCount(api, orgSlug, query) { }) .then(res => res.count); } + +/** + * Get the field renderer for the named field and metadata + * + * @param {String} field name + * @param {object} metadata mapping. + * @returns {Function} + */ +export function getFieldRenderer(field, meta) { + if (SPECIAL_FIELDS.hasOwnProperty(field)) { + return SPECIAL_FIELDS[field].renderFunc; + } + // Inflect the field name so it will match the property in the result set. 
+ const fieldName = field.replace(/^([^\(]+)\(([a-z\._+]+)\)$/, '$1_$2'); + const fieldType = meta[fieldName]; + if (FIELD_FORMATTERS.hasOwnProperty(fieldType)) { + return partial(FIELD_FORMATTERS[fieldType].renderFunc, fieldName); + } + return partial(FIELD_FORMATTERS.string.renderFunc, fieldName); +} diff --git a/tests/js/spec/views/organizationEventsV2/eventDetails.spec.jsx b/tests/js/spec/views/organizationEventsV2/eventDetails.spec.jsx index c9fa5ba9228878..fa8233fdfd61cd 100644 --- a/tests/js/spec/views/organizationEventsV2/eventDetails.spec.jsx +++ b/tests/js/spec/views/organizationEventsV2/eventDetails.spec.jsx @@ -13,14 +13,22 @@ describe('OrganizationEventsV2 > EventDetails', function() { beforeEach(function() { MockApiClient.addMockResponse({ url: '/organizations/org-slug/events/', - body: [ - { - id: 'deadbeef', - title: 'Oh no something bad', - 'project.name': 'project-slug', - timestamp: '2019-05-23T22:12:48+00:00', + body: { + meta: { + id: 'string', + title: 'string', + 'project.name': 'string', + timestamp: 'date', }, - ], + data: [ + { + id: 'deadbeef', + title: 'Oh no something bad', + 'project.name': 'project-slug', + timestamp: '2019-05-23T22:12:48+00:00', + }, + ], + }, }); MockApiClient.addMockResponse({ url: '/organizations/org-slug/events/project-slug:deadbeef/', diff --git a/tests/js/spec/views/organizationEventsV2/index.spec.jsx b/tests/js/spec/views/organizationEventsV2/index.spec.jsx index f7fc728f72c30a..df90963d787ab7 100644 --- a/tests/js/spec/views/organizationEventsV2/index.spec.jsx +++ b/tests/js/spec/views/organizationEventsV2/index.spec.jsx @@ -8,14 +8,22 @@ describe('OrganizationEventsV2', function() { beforeEach(function() { MockApiClient.addMockResponse({ url: '/organizations/org-slug/events/', - body: [ - { - id: 'deadbeef', - title: eventTitle, - 'project.name': 'project-slug', - timestamp: '2019-05-23T22:12:48+00:00', + body: { + meta: { + id: 'string', + title: 'string', + 'project.name': 'string', + timestamp: 'date', }, - ], + data: [ + { + id: 'deadbeef', + title: eventTitle, + 'project.name': 'project-slug', + timestamp: '2019-05-23T22:12:48+00:00', + }, + ], + }, }); MockApiClient.addMockResponse({ url: '/organizations/org-slug/events/project-slug:deadbeef/', From 9faa83a77ede8468570de4244ec4925cb4c4b606 Mon Sep 17 00:00:00 2001 From: Mark Story Date: Tue, 13 Aug 2019 11:49:04 -0400 Subject: [PATCH 3/3] Fix sorting on aggregates. Users shouldn't have to figure out our internal aliasing system when we can do it for them. --- src/sentry/api/event_search.py | 36 ++++++++++++++++++++++----- tests/sentry/api/test_event_search.py | 13 ++++++++++ 2 files changed, 43 insertions(+), 6 deletions(-) diff --git a/src/sentry/api/event_search.py b/src/sentry/api/event_search.py index c8464144462dfc..054df3727729fc 100644 --- a/src/sentry/api/event_search.py +++ b/src/sentry/api/event_search.py @@ -733,12 +733,35 @@ def validate_aggregate(field, match): "Invalid column '%s' in aggregate function '%s'" % (column, function_name)) -def validate_orderby(orderby, fields): +def resolve_orderby(orderby, fields, aggregations): + """ + We accept column names, aggregate functions, and aliases as order by + values. Aggregates and field aliases need to be resolve/validated. 
+    """
     orderby = orderby if isinstance(orderby, (list, tuple)) else [orderby]
+    validated = []
     for column in orderby:
-        column = column.lstrip('-')
-        if column not in fields:
-            raise InvalidSearchQuery('Cannot order by an field that is not selected.')
+        bare_column = column.lstrip('-')
+        if bare_column in fields:
+            validated.append(column)
+            continue
+
+        match = AGGREGATE_PATTERN.search(bare_column)
+        if match:
+            bare_column = get_aggregate_alias(match)
+            found = [agg[2] for agg in aggregations if agg[2] == bare_column]
+            if found:
+                prefix = '-' if column.startswith('-') else ''
+                validated.append(prefix + bare_column)
+
+    if len(validated) == len(orderby):
+        return validated
+
+    raise InvalidSearchQuery('Cannot order by a field that is not selected.')
+
+
+def get_aggregate_alias(match):
+    return u'{}_{}'.format(match.group('function'), match.group('column')).rstrip('_')
 
 
 def resolve_field_list(fields, snuba_args):
@@ -780,7 +803,7 @@ def resolve_field_list(fields, snuba_args):
         aggregations.append([
             VALID_AGGREGATES[match.group('function')]['snuba_name'],
             match.group('column'),
-            u'{}_{}'.format(match.group('function'), match.group('column')).rstrip('_')
+            get_aggregate_alias(match)
         ])
 
     rollup = snuba_args.get('rollup')
@@ -804,7 +827,7 @@ def resolve_field_list(fields, snuba_args):
 
     orderby = snuba_args.get('orderby')
    if orderby:
-        validate_orderby(orderby, fields)
+        orderby = resolve_orderby(orderby, columns, aggregations)
 
     # If aggregations are present all columns
     # need to be added to the group by so that the query is valid.
@@ -815,4 +838,5 @@ def resolve_field_list(fields, snuba_args):
         'selected_columns': columns,
         'aggregations': aggregations,
         'groupby': groupby,
+        'orderby': orderby
     }
diff --git a/tests/sentry/api/test_event_search.py b/tests/sentry/api/test_event_search.py
index 046706ed9667d6..e965483e42a25f 100644
--- a/tests/sentry/api/test_event_search.py
+++ b/tests/sentry/api/test_event_search.py
@@ -1349,3 +1349,16 @@ def test_orderby_field_alias(self):
             ['argMax(project_id, timestamp)', '', 'projectid'],
         ]
         assert result['groupby'] == []
+
+    def test_orderby_field_aggregate(self):
+        fields = ['count(id)', 'count_unique(user)']
+        snuba_args = {'orderby': '-count(id)'}
+        result = resolve_field_list(fields, snuba_args)
+        assert result['orderby'] == ['-count_id']
+        assert result['aggregations'] == [
+            ['count', 'id', 'count_id'],
+            ['uniq', 'user', 'count_unique_user'],
+            ['argMax(event_id, timestamp)', '', 'latest_event'],
+            ['argMax(project_id, timestamp)', '', 'projectid'],
+        ]
+        assert result['groupby'] == []
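
To make the contract introduced by this series concrete, a minimal Python sketch of a client consuming the {data, meta} payload returned by the organization-events-v2 endpoint follows. The payload literal and the formatter lambdas are illustrative assumptions, not code from the patches; the flow mirrors how getFieldRenderer() in utils.jsx resolves a renderer from the meta block, with the same 'string' fallback that handle_results_with_meta applies on the server.

# Sketch only: a response shaped like the ones returned after PATCH 1.
# The literal values below are invented for the example.
payload = {
    'meta': {'title': 'string', 'count_id': 'integer', 'timestamp': 'date'},
    'data': [
        {'title': 'Oh no something bad', 'count_id': 2,
         'timestamp': '2019-05-23T22:12:48+00:00'},
    ],
}

# Pick a formatter per column from the JSON type in 'meta', defaulting to
# 'string' for columns the metadata does not describe.
formatters = {
    'integer': lambda value: '{:,}'.format(value),
    'date': lambda value: value.replace('T', ' '),
    'string': lambda value: value if value else 'n/a',
}

for row in payload['data']:
    for field, value in row.items():
        render = formatters.get(payload['meta'].get(field, 'string'),
                                formatters['string'])
        print('%s: %s' % (field, render(value)))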