Mirror of https://github.com/SigNoz/signoz.git (synced 2026-05-06 18:40:32 +01:00)

Compare commits: fix-7233 ... v0.76.0-ca (33 commits)
Commits in this compare:

ae6bbc7192
da627b9779
5bd30af3f7
29f72451d8
85fe1a2a18
2115656a5b
c9da6006db
efe86b0a00
52780a7ad9
44b46c089b
ca65b4148c
064a522293
8563bcdacf
727cd7747b
a7ff27ef07
f61e33aa23
5bf79edb8b
3e2c23d015
c6bd1dd283
51b4c8d85b
697f16743f
0f4e4473ef
4eb2e0b97b
8e5526c66c
423561f652
dc61db6936
e9bba641bc
fbc4e50136
6049ba194a
068126db40
4b87ac6424
f47a4207a9
657240c71b
@@ -1,13 +1,13 @@
import { ConfigProvider } from 'antd';
import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import logEvent from 'api/common/logEvent';
import NotFound from 'components/NotFound';
import Spinner from 'components/Spinner';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
import AppLayout from 'container/AppLayout';
import useAnalytics from 'hooks/analytics/useAnalytics';
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useThemeConfig } from 'hooks/useDarkMode';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
@@ -15,6 +15,7 @@ import { LICENSE_PLAN_KEY } from 'hooks/useLicense';
import { NotificationProvider } from 'hooks/useNotifications';
import { ResourceProvider } from 'hooks/useResourceAttribute';
import history from 'lib/history';
import { identity, pickBy } from 'lodash-es';
import posthog from 'posthog-js';
import AlertRuleProvider from 'providers/Alert';
import { useAppContext } from 'providers/App/App';
@@ -50,6 +51,8 @@ function App(): JSX.Element {
} = useAppContext();
const [routes, setRoutes] = useState<AppRoutes[]>(defaultRoutes);

const { trackPageView } = useAnalytics();

const { hostname, pathname } = window.location;

const {
@@ -66,21 +69,18 @@ function App(): JSX.Element {

const { name, email, role } = user;

const domain = extractDomain(email);
const hostNameParts = hostname.split('.');

const identifyPayload = {
email,
name,
company_name: orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
role,
source: 'signoz-ui',
};

const sanitizedIdentifyPayload = pickBy(identifyPayload, identity);
const domain = extractDomain(email);
const hostNameParts = hostname.split('.');

const groupTraits = {
name: orgName,
tenant_id: hostNameParts[0],
@@ -90,13 +90,8 @@ function App(): JSX.Element {
source: 'signoz-ui',
};

if (email) {
logEvent('Email Identified', identifyPayload, 'identify');
}

if (domain) {
logEvent('Domain Identified', groupTraits, 'group');
}
window.analytics.identify(email, sanitizedIdentifyPayload);
window.analytics.group(domain, groupTraits);

posthog?.identify(email, {
email,
@@ -197,7 +192,9 @@ function App(): JSX.Element {
hide_default_launcher: false,
});
}
}, [pathname]);

trackPageView(pathname);
}, [pathname, trackPageView]);

useEffect(() => {
// the feature-flag request shouldn't still be loading, and either featureFlags or fetchError should be set, indicating that the request is complete

@@ -7,15 +7,11 @@ import { EventSuccessPayloadProps } from 'types/api/events/types';
const logEvent = async (
eventName: string,
attributes: Record<string, unknown>,
eventType?: 'track' | 'group' | 'identify',
rateLimited?: boolean,
): Promise<SuccessResponse<EventSuccessPayloadProps> | ErrorResponse> => {
try {
const response = await axios.post('/event', {
eventName,
attributes,
eventType: eventType || 'track',
rateLimited: rateLimited || false, // TODO: Update this once we have a proper way to handle rate limiting
});

return {

@@ -392,16 +392,11 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
LOCALSTORAGE.DONT_SHOW_SLOW_API_WARNING,
);

logEvent(
`Slow API Warning`,
{
durationMs: data.duration,
url: data.url,
thresholdMs: data.threshold,
},
'track',
true, // rate limited - controlled by Backend
);
logEvent(`Slow API Warning`, {
duration: `${data.duration}ms`,
url: data.url,
threshold: data.threshold,
});

const isDontShowSlowApiWarning = dontShowSlowApiWarning === 'true';

@@ -173,7 +173,6 @@ function EditAlertChannels({
const prepareEmailRequest = useCallback(
() => ({
name: selectedConfig?.name || '',
send_resolved: selectedConfig?.send_resolved || false,
to: selectedConfig.to || '',
html: selectedConfig.html || '',
headers: selectedConfig.headers || {},
@@ -209,7 +208,6 @@ function EditAlertChannels({
const preparePagerRequest = useCallback(
() => ({
name: selectedConfig.name || '',
send_resolved: selectedConfig?.send_resolved || false,
routing_key: selectedConfig.routing_key,
client: selectedConfig.client,
client_url: selectedConfig.client_url,
@@ -263,7 +261,6 @@ function EditAlertChannels({
const prepareOpsgenieRequest = useCallback(
() => ({
name: selectedConfig.name || '',
send_resolved: selectedConfig?.send_resolved || false,
api_key: selectedConfig.api_key || '',
message: selectedConfig.message || '',
description: selectedConfig.description || '',

@@ -19,6 +19,10 @@ jest.mock('hooks/useNotifications', () => ({
})),
}));

window.analytics = {
track: jest.fn(),
};

describe('Onboarding invite team member flow', () => {
it('initial render and get started page', async () => {
const { findByText } = render(

@@ -38,7 +38,7 @@ const useSampleLogs = ({
filters: filter || initialFilters,
aggregateOperator: LogsAggregatorOperator.NOOP,
orderBy: [{ columnName: 'timestamp', order: 'desc' }],
pageSize: count || DEFAULT_SAMPLE_LOGS_COUNT,
limit: count || DEFAULT_SAMPLE_LOGS_COUNT,
};
return q;
}, [count, filter]);

frontend/src/hooks/analytics/useAnalytics.tsx (new file, 40 lines)
@@ -0,0 +1,40 @@
import { useAppContext } from 'providers/App/App';
import { useCallback } from 'react';
import { extractDomain } from 'utils/app';

const useAnalytics = (): any => {
const { user } = useAppContext();

// Segment Page View - analytics.page([category], [name], [properties], [options], [callback]);
const trackPageView = useCallback(
(pageName: string): void => {
if (user && user.email) {
window.analytics.page(null, pageName, {
userId: user.email,
});
}
},
[user],
);

const trackEvent = (
eventName: string,
properties?: Record<string, unknown>,
): void => {
if (user && user.email) {
const context = {
context: {
groupId: extractDomain(user?.email),
},
};

const updatedProperties = { ...properties };
updatedProperties.userId = user.email;
window.analytics.track(eventName, updatedProperties, context);
}
};

return { trackPageView, trackEvent };
};

export default useAnalytics;

@@ -49,10 +49,12 @@
/>
<meta data-react-helmet="true" name="docusaurus_locale" content="en" />
<meta data-react-helmet="true" name="docusaurus_tag" content="default" />
<meta name="robots" content="noindex" />
<meta name="robots" content="noindex">
<link data-react-helmet="true" rel="shortcut icon" href="/favicon.ico" />

<link rel="stylesheet" href="/css/uPlot.min.css" />


</head>
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>
@@ -98,16 +100,32 @@
</script>
<script>
const CUSTOMERIO_ID = '<%= htmlWebpackPlugin.options.CUSTOMERIO_ID %>';
const CUSTOMERIO_SITE_ID =
'<%= htmlWebpackPlugin.options.CUSTOMERIO_SITE_ID %>';
const CUSTOMERIO_SITE_ID = '<%= htmlWebpackPlugin.options.CUSTOMERIO_SITE_ID %>';
!function(){var i="cioanalytics", analytics=(window[i]=window[i]||[]);if(!analytics.initialize)if(analytics.invoked)window.console&&console.error&&console.error("Snippet included twice.");else{analytics.invoked=!0;analytics.methods=["trackSubmit","trackClick","trackLink","trackForm","pageview","identify","reset","group","track","ready","alias","debug","page","once","off","on","addSourceMiddleware","addIntegrationMiddleware","setAnonymousId","addDestinationMiddleware"];analytics.factory=function(e){return function(){var t=Array.prototype.slice.call(arguments);t.unshift(e);analytics.push(t);return analytics}};for(var e=0;e<analytics.methods.length;e++){var key=analytics.methods[e];analytics[key]=analytics.factory(key)}analytics.load=function(key,e){var t=document.createElement("script");t.type="text/javascript";t.async=!0;t.setAttribute('data-global-customerio-analytics-key', i);t.src="https://cdp.customer.io/v1/analytics-js/snippet/" + key + "/analytics.min.js";var n=document.getElementsByTagName("script")[0];n.parentNode.insertBefore(t,n);analytics._writeKey=key;analytics._loadOptions=e};analytics.SNIPPET_VERSION="4.15.3";
analytics.load(
CUSTOMERIO_ID,
{
"integrations": {
"Customer.io In-App Plugin": {
siteId: CUSTOMERIO_SITE_ID
}
}
}
);
analytics.page();
}}();
</script>
<script>
//Set your SEGMENT_ID
const SEGMENT_ID = '<%= htmlWebpackPlugin.options.SEGMENT_ID %>';

!(function () {
var i = 'cioanalytics',
analytics = (window[i] = window[i] || []);
var analytics = (window.analytics = window.analytics || []);
if (!analytics.initialize)
if (analytics.invoked)
window.console &&
console.error &&
console.error('Snippet included twice.');
console.error('Segment snippet included twice.');
else {
analytics.invoked = !0;
analytics.methods = [
@@ -134,36 +152,35 @@
];
analytics.factory = function (e) {
return function () {
var t = Array.prototype.slice.call(arguments);
t.unshift(e);
analytics.push(t);
if (window.analytics.initialized)
return window.analytics[e].apply(window.analytics, arguments);
var i = Array.prototype.slice.call(arguments);
i.unshift(e);
analytics.push(i);
return analytics;
};
};
for (var e = 0; e < analytics.methods.length; e++) {
var key = analytics.methods[e];
for (var i = 0; i < analytics.methods.length; i++) {
var key = analytics.methods[i];
analytics[key] = analytics.factory(key);
}
analytics.load = function (key, e) {
analytics.load = function (key, i) {
var t = document.createElement('script');
t.type = 'text/javascript';
t.async = !0;
t.setAttribute('data-global-customerio-analytics-key', i);
t.src =
'https://cdp.customer.io/v1/analytics-js/snippet/' +
'https://analytics-cdn.signoz.io/analytics.js/v1/' +
key +
'/analytics.min.js';
var n = document.getElementsByTagName('script')[0];
n.parentNode.insertBefore(t, n);
analytics._writeKey = key;
analytics._loadOptions = e;
analytics._loadOptions = i;
};
analytics.SNIPPET_VERSION = '4.15.3';
analytics.load(CUSTOMERIO_ID, {
analytics._writeKey = SEGMENT_ID;
analytics.SNIPPET_VERSION = '4.16.1';
analytics.load(SEGMENT_ID, {
integrations: {
'Customer.io In-App Plugin': {
siteId: CUSTOMERIO_SITE_ID,
},
'Segment.io': { apiHost: 'analytics-api.signoz.io/v1' },
},
});
analytics.page();

@@ -21,6 +21,7 @@ const plugins = [
new HtmlWebpackPlugin({
template: 'src/index.html.ejs',
INTERCOM_APP_ID: process.env.INTERCOM_APP_ID,
SEGMENT_ID: process.env.SEGMENT_ID,
CUSTOMERIO_SITE_ID: process.env.CUSTOMERIO_SITE_ID,
CUSTOMERIO_ID: process.env.CUSTOMERIO_ID,
POSTHOG_KEY: process.env.POSTHOG_KEY,
@@ -40,6 +41,7 @@ const plugins = [
FRONTEND_API_ENDPOINT: process.env.FRONTEND_API_ENDPOINT,
WEBSOCKET_API_ENDPOINT: process.env.WEBSOCKET_API_ENDPOINT,
INTERCOM_APP_ID: process.env.INTERCOM_APP_ID,
SEGMENT_ID: process.env.SEGMENT_ID,
CUSTOMERIO_SITE_ID: process.env.CUSTOMERIO_SITE_ID,
CUSTOMERIO_ID: process.env.CUSTOMERIO_ID,
POSTHOG_KEY: process.env.POSTHOG_KEY,

@@ -26,6 +26,7 @@ const plugins = [
new HtmlWebpackPlugin({
template: 'src/index.html.ejs',
INTERCOM_APP_ID: process.env.INTERCOM_APP_ID,
SEGMENT_ID: process.env.SEGMENT_ID,
CUSTOMERIO_SITE_ID: process.env.CUSTOMERIO_SITE_ID,
CUSTOMERIO_ID: process.env.CUSTOMERIO_ID,
POSTHOG_KEY: process.env.POSTHOG_KEY,
@@ -50,6 +51,7 @@ const plugins = [
FRONTEND_API_ENDPOINT: process.env.FRONTEND_API_ENDPOINT,
WEBSOCKET_API_ENDPOINT: process.env.WEBSOCKET_API_ENDPOINT,
INTERCOM_APP_ID: process.env.INTERCOM_APP_ID,
SEGMENT_ID: process.env.SEGMENT_ID,
CUSTOMERIO_SITE_ID: process.env.CUSTOMERIO_SITE_ID,
CUSTOMERIO_ID: process.env.CUSTOMERIO_ID,
POSTHOG_KEY: process.env.POSTHOG_KEY,

go.mod (4 changes)
@@ -69,7 +69,7 @@ require (
go.opentelemetry.io/otel/trace v1.34.0
go.uber.org/multierr v1.11.0
go.uber.org/zap v1.27.0
golang.org/x/crypto v0.32.0
golang.org/x/crypto v0.31.0
golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842
golang.org/x/net v0.33.0
golang.org/x/oauth2 v0.24.0
@@ -116,7 +116,7 @@ require (
github.com/fsnotify/fsnotify v1.7.0 // indirect
github.com/go-faster/city v1.0.1 // indirect
github.com/go-faster/errors v0.7.1 // indirect
github.com/go-jose/go-jose/v4 v4.0.5 // indirect
github.com/go-jose/go-jose/v4 v4.0.2 // indirect
github.com/go-logfmt/logfmt v0.6.0 // indirect
github.com/go-logr/logr v1.4.2 // indirect
github.com/go-logr/stdr v1.2.2 // indirect

go.sum (12 changes)
@@ -262,8 +262,8 @@ github.com/go-faster/errors v0.7.1/go.mod h1:5ySTjWFiphBs07IKuiL69nxdfd5+fzh1u7F
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-jose/go-jose/v4 v4.0.5 h1:M6T8+mKZl/+fNNuFHvGIzDz7BTLQPIounk/b9dw3AaE=
github.com/go-jose/go-jose/v4 v4.0.5/go.mod h1:s3P1lRrkT8igV8D9OjyL4WRyHvjB6a4JSllnOrmmBOA=
github.com/go-jose/go-jose/v4 v4.0.2 h1:R3l3kkBds16bO7ZFAEEcofK0MkrAJt3jlJznWZG0nvk=
github.com/go-jose/go-jose/v4 v4.0.2/go.mod h1:WVf9LFMHh/QVrmqrOfqun0C45tMe3RoiKJMPvgWwLfY=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
@@ -1108,8 +1108,8 @@ golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm
golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc=
golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -1332,8 +1332,8 @@ golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg=
golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=

@@ -48,9 +48,6 @@ const (
defaultTraceLocalTableName string = "signoz_index_v3"
defaultTraceResourceTableV3 string = "distributed_traces_v3_resource"
defaultTraceSummaryTable string = "distributed_trace_summary"

defaultMetadataDB string = "signoz_metadata"
defaultMetadataTable string = "distributed_attributes_metadata"
)

// NamespaceConfig is Clickhouse's internal configuration data
@@ -91,8 +88,6 @@ type namespaceConfig struct {
TraceLocalTableNameV3 string
TraceResourceTableV3 string
TraceSummaryTable string
MetadataDB string
MetadataTable string
}

// Connector defines how to connect to the database
@@ -146,8 +141,6 @@ func NewOptions(
TraceLocalTableNameV3: defaultTraceLocalTableName,
TraceResourceTableV3: defaultTraceResourceTableV3,
TraceSummaryTable: defaultTraceSummaryTable,
MetadataDB: defaultMetadataDB,
MetadataTable: defaultMetadataTable,
},
others: make(map[string]*namespaceConfig, len(otherNamespaces)),
}

@@ -164,8 +164,6 @@ type ClickHouseReader struct {

fluxIntervalForTraceDetail time.Duration
cache cache.Cache
metadataDB string
metadataTable string
}

// NewTraceReader returns a TraceReader for the database
@@ -261,8 +259,6 @@ func NewReaderFromClickhouseConnection(

fluxIntervalForTraceDetail: fluxIntervalForTraceDetail,
cache: cache,
metadataDB: options.primary.MetadataDB,
metadataTable: options.primary.MetadataTable,
}
}

@@ -4130,97 +4126,6 @@ func (r *ClickHouseReader) GetLogAttributeKeys(ctx context.Context, req *v3.Filt
return &response, nil
}

func (r *ClickHouseReader) FetchRelatedValues(ctx context.Context, req *v3.FilterAttributeValueRequest) ([]string, error) {
var andConditions []string

andConditions = append(andConditions, fmt.Sprintf("unix_milli >= %d", req.StartTimeMillis))
andConditions = append(andConditions, fmt.Sprintf("unix_milli <= %d", req.EndTimeMillis))

if len(req.ExistingFilterItems) != 0 {
for _, item := range req.ExistingFilterItems {
// we only support string for related values
if item.Key.DataType != v3.AttributeKeyDataTypeString {
continue
}

var colName string
switch item.Key.Type {
case v3.AttributeKeyTypeResource:
colName = "resource_attributes"
case v3.AttributeKeyTypeTag:
colName = "attributes"
default:
// we only support resource and tag for related values as of now
continue
}
// IN doesn't make use of map value index, we convert it to = or !=
operator := item.Operator
if v3.FilterOperator(strings.ToLower(string(item.Operator))) == v3.FilterOperatorIn {
operator = "="
} else if v3.FilterOperator(strings.ToLower(string(item.Operator))) == v3.FilterOperatorNotIn {
operator = "!="
}
addCondition := func(val string) {
andConditions = append(andConditions, fmt.Sprintf("mapContains(%s, '%s') AND %s['%s'] %s %s", colName, item.Key.Key, colName, item.Key.Key, operator, val))
}
switch v := item.Value.(type) {
case string:
fmtVal := utils.ClickHouseFormattedValue(v)
addCondition(fmtVal)
case []string:
for _, val := range v {
fmtVal := utils.ClickHouseFormattedValue(val)
addCondition(fmtVal)
}
case []interface{}:
for _, val := range v {
fmtVal := utils.ClickHouseFormattedValue(val)
addCondition(fmtVal)
}
}
}
}
whereClause := strings.Join(andConditions, " AND ")

var selectColumn string
switch req.TagType {
case v3.TagTypeResource:
selectColumn = "resource_attributes" + "['" + req.FilterAttributeKey + "']"
case v3.TagTypeTag:
selectColumn = "attributes" + "['" + req.FilterAttributeKey + "']"
default:
selectColumn = "attributes" + "['" + req.FilterAttributeKey + "']"
}

filterSubQuery := fmt.Sprintf(
"SELECT DISTINCT %s FROM %s.%s WHERE %s LIMIT 100",
selectColumn,
r.metadataDB,
r.metadataTable,
whereClause,
)
zap.L().Debug("filterSubQuery for related values", zap.String("query", filterSubQuery))

rows, err := r.db.Query(ctx, filterSubQuery)
if err != nil {
return nil, fmt.Errorf("error while executing query: %s", err.Error())
}
defer rows.Close()

var attributeValues []string
for rows.Next() {
var value string
if err := rows.Scan(&value); err != nil {
return nil, fmt.Errorf("error while scanning rows: %s", err.Error())
}
if value != "" {
attributeValues = append(attributeValues, value)
}
}

return attributeValues, nil
}
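
For reference, a minimal runnable sketch of the WHERE-clause assembly FetchRelatedValues performs for a single string tag filter. The timestamps, key, and value are illustrative; the database and table names are the defaults from the options hunk earlier in this compare.

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Time-bound conditions, as in the function above (timestamps illustrative).
	andConditions := []string{
		fmt.Sprintf("unix_milli >= %d", int64(1609459200000)),
		fmt.Sprintf("unix_milli <= %d", int64(1609466400000)),
	}

	// One existing string filter item of type "tag"; an IN operator would be rewritten to "=".
	colName, key, operator, val := "attributes", "service.name", "=", "'frontend'"
	andConditions = append(andConditions, fmt.Sprintf(
		"mapContains(%s, '%s') AND %s['%s'] %s %s",
		colName, key, colName, key, operator, val,
	))

	whereClause := strings.Join(andConditions, " AND ")

	// Shape of the resulting filterSubQuery for a tag-typed FilterAttributeKey "env".
	fmt.Printf(
		"SELECT DISTINCT %s FROM %s.%s WHERE %s LIMIT 100\n",
		"attributes['env']", "signoz_metadata", "distributed_attributes_metadata", whereClause,
	)
}
```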

func (r *ClickHouseReader) GetLogAttributeValues(ctx context.Context, req *v3.FilterAttributeValueRequest) (*v3.FilterAttributeValueResponse, error) {
var err error
var filterValueColumn string
@@ -4322,13 +4227,6 @@ func (r *ClickHouseReader) GetLogAttributeValues(ctx context.Context, req *v3.Fi
}
}

if req.IncludeRelated {
relatedValues, _ := r.FetchRelatedValues(ctx, req)
attributeValues.RelatedValues = &v3.FilterAttributeValueResponse{
StringAttributeValues: relatedValues,
}
}

return &attributeValues, nil

}
@@ -5009,13 +4907,6 @@ func (aH *APIHandler) GetTraceAttributeValues(ctx context.Context, req *v3.
}
}

if req.IncludeRelated {
relatedValues, _ := r.FetchRelatedValues(ctx, req)
attributeValues.RelatedValues = &v3.FilterAttributeValueResponse{
StringAttributeValues: relatedValues,
}
}

return &attributeValues, nil
}

@@ -395,12 +395,6 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *AuthMid
withCacheControl(AutoCompleteCacheControlAge, aH.autoCompleteAttributeKeys))).Methods(http.MethodGet)
subRouter.HandleFunc("/autocomplete/attribute_values", am.ViewAccess(
withCacheControl(AutoCompleteCacheControlAge, aH.autoCompleteAttributeValues))).Methods(http.MethodGet)

// autocomplete with filters using new endpoints
// Note: eventually all autocomplete APIs should be migrated to new endpoint with appropriate filters, deprecating the older ones

subRouter.HandleFunc("/auto_complete/attribute_values", am.ViewAccess(aH.autoCompleteAttributeValuesPost)).Methods(http.MethodPost)

subRouter.HandleFunc("/query_range", am.ViewAccess(aH.QueryRangeV3)).Methods(http.MethodPost)
subRouter.HandleFunc("/query_range/format", am.ViewAccess(aH.QueryRangeV3Format)).Methods(http.MethodPost)

@@ -1670,14 +1664,7 @@ func (aH *APIHandler) registerEvent(w http.ResponseWriter, r *http.Request) {
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if ok {
switch request.EventType {
case model.TrackEvent:
telemetry.GetInstance().SendEvent(request.EventName, request.Attributes, claims.Email, request.RateLimited, true)
case model.GroupEvent:
telemetry.GetInstance().SendGroupEvent(request.Attributes)
case model.IdentifyEvent:
telemetry.GetInstance().SendIdentifyEvent(request.Attributes)
}
telemetry.GetInstance().SendEvent(request.EventName, request.Attributes, claims.Email, request.RateLimited, true)
aH.WriteJSON(w, r, map[string]string{"data": "Event Processed Successfully"})
} else {
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
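
For context, a compact sketch of the per-type routing shown in the hunk above, with a plain function standing in for the telemetry singleton; the event name and attributes are illustrative.

```go
package main

import "fmt"

type EventType string

const (
	TrackEvent    EventType = "track"
	GroupEvent    EventType = "group"
	IdentifyEvent EventType = "identify"
)

// dispatch mirrors the switch in the handler; the returned string stands in
// for the telemetry call that would be made for each event type.
func dispatch(et EventType, name string, attrs map[string]interface{}) string {
	switch et {
	case TrackEvent:
		return fmt.Sprintf("SendEvent(%q, %v)", name, attrs)
	case GroupEvent:
		return fmt.Sprintf("SendGroupEvent(%v)", attrs)
	case IdentifyEvent:
		return fmt.Sprintf("SendIdentifyEvent(%v)", attrs)
	}
	return "ignored"
}

func main() {
	fmt.Println(dispatch(GroupEvent, "", map[string]interface{}{"company_domain": "example.com"}))
}
```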

@@ -4840,35 +4827,6 @@ func (aH *APIHandler) autoCompleteAttributeValues(w http.ResponseWriter, r *http
aH.Respond(w, response)
}

func (aH *APIHandler) autoCompleteAttributeValuesPost(w http.ResponseWriter, r *http.Request) {
var response *v3.FilterAttributeValueResponse
req, err := parseFilterAttributeValueRequestBody(r)

if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}

switch req.DataSource {
case v3.DataSourceMetrics:
response, err = aH.reader.GetMetricAttributeValues(r.Context(), req)
case v3.DataSourceLogs:
response, err = aH.reader.GetLogAttributeValues(r.Context(), req)
case v3.DataSourceTraces:
response, err = aH.reader.GetTraceAttributeValues(r.Context(), req)
default:
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("invalid data source")}, nil)
return
}

if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}

aH.Respond(w, response)
}

func (aH *APIHandler) getSpanKeysV3(ctx context.Context, queryRangeParams *v3.QueryRangeParamsV3) (map[string]v3.AttributeKey, error) {
data := map[string]v3.AttributeKey{}
for _, query := range queryRangeParams.CompositeQuery.BuilderQueries {

@@ -68,12 +68,7 @@ func parseRegisterEventRequest(r *http.Request) (*model.RegisterEventParams, err
if err != nil {
return nil, err
}
// Validate the event type
if !postData.EventType.IsValid() {
return nil, errors.New("eventType param missing/incorrect in query")
}

if postData.EventType == model.TrackEvent && postData.EventName == "" {
if postData.EventName == "" {
return nil, errors.New("eventName param missing in query")
}

@@ -741,25 +736,6 @@ func parseFilterAttributeKeyRequest(r *http.Request) (*v3.FilterAttributeKeyRequ
return &req, nil
}

func parseFilterAttributeValueRequestBody(r *http.Request) (*v3.FilterAttributeValueRequest, error) {

var req v3.FilterAttributeValueRequest

if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
return nil, err
}

if err := req.Validate(); err != nil {
return nil, err
}

// offset the start by two window periods for better results
req.StartTimeMillis = req.StartTimeMillis - time.Hour.Milliseconds()*6*2
req.EndTimeMillis = req.EndTimeMillis + time.Hour.Milliseconds()*6

return &req, nil
}
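
A quick check of the offset arithmetic above: with time.Hour.Milliseconds()*6 as the window, the start moves back by two windows (12 h) and the end forward by one (6 h). A sketch with illustrative timestamps:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	window := time.Hour.Milliseconds() * 6 // the 6-hour window used above

	// Illustrative request bounds (ms since epoch).
	start := int64(1735689600000)
	end := int64(1735711200000)

	start -= window * 2 // pull the start back by two window periods
	end += window       // push the end forward by one window period

	fmt.Println(start, end) // 1735646400000 1735732800000
}
```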

func parseFilterAttributeValueRequest(r *http.Request) (*v3.FilterAttributeValueRequest, error) {

var req v3.FilterAttributeValueRequest

@@ -11,74 +11,4 @@ var preferenceMap = map[string]Preference{
IsDiscreteValues: true,
AllowedScopes: []string{"org"},
},
"WELCOME_CHECKLIST_DO_LATER": {
Key: "WELCOME_CHECKLIST_DO_LATER",
Name: "Welcome Checklist Do Later",
Description: "Welcome Checklist Do Later",
ValueType: "boolean",
DefaultValue: false,
AllowedValues: []interface{}{true, false},
IsDiscreteValues: true,
AllowedScopes: []string{"user"},
},
"WELCOME_CHECKLIST_SEND_LOGS_SKIPPED": {
Key: "WELCOME_CHECKLIST_SEND_LOGS_SKIPPED",
Name: "Welcome Checklist Send Logs Skipped",
Description: "Welcome Checklist Send Logs Skipped",
ValueType: "boolean",
DefaultValue: false,
AllowedValues: []interface{}{true, false},
IsDiscreteValues: true,
AllowedScopes: []string{"user"},
},
"WELCOME_CHECKLIST_SEND_TRACES_SKIPPED": {
Key: "WELCOME_CHECKLIST_SEND_TRACES_SKIPPED",
Name: "Welcome Checklist Send Traces Skipped",
Description: "Welcome Checklist Send Traces Skipped",
ValueType: "boolean",
DefaultValue: false,
AllowedValues: []interface{}{true, false},
IsDiscreteValues: true,
AllowedScopes: []string{"user"},
},
"WELCOME_CHECKLIST_SEND_INFRA_METRICS_SKIPPED": {
Key: "WELCOME_CHECKLIST_SEND_INFRA_METRICS_SKIPPED",
Name: "Welcome Checklist Send Infra Metrics Skipped",
Description: "Welcome Checklist Send Infra Metrics Skipped",
ValueType: "boolean",
DefaultValue: false,
AllowedValues: []interface{}{true, false},
IsDiscreteValues: true,
AllowedScopes: []string{"user"},
},
"WELCOME_CHECKLIST_SETUP_DASHBOARDS_SKIPPED": {
Key: "WELCOME_CHECKLIST_SETUP_DASHBOARDS_SKIPPED",
Name: "Welcome Checklist Setup Dashboards Skipped",
Description: "Welcome Checklist Setup Dashboards Skipped",
ValueType: "boolean",
DefaultValue: false,
AllowedValues: []interface{}{true, false},
IsDiscreteValues: true,
AllowedScopes: []string{"user"},
},
"WELCOME_CHECKLIST_SETUP_ALERTS_SKIPPED": {
Key: "WELCOME_CHECKLIST_SETUP_ALERTS_SKIPPED",
Name: "Welcome Checklist Setup Alerts Skipped",
Description: "Welcome Checklist Setup Alerts Skipped",
ValueType: "boolean",
DefaultValue: false,
AllowedValues: []interface{}{true, false},
IsDiscreteValues: true,
AllowedScopes: []string{"user"},
},
"WELCOME_CHECKLIST_SETUP_SAVED_VIEW_SKIPPED": {
Key: "WELCOME_CHECKLIST_SETUP_SAVED_VIEW_SKIPPED",
Name: "Welcome Checklist Setup Saved View Skipped",
Description: "Welcome Checklist Setup Saved View Skipped",
ValueType: "boolean",
DefaultValue: false,
AllowedValues: []interface{}{true, false},
IsDiscreteValues: true,
AllowedScopes: []string{"user"},
},
}

@@ -122,6 +122,7 @@ func (q *querier) runBuilderQuery(
misses := q.queryCache.FindMissingTimeRanges(start, end, builderQuery.StepInterval, cacheKeys[queryName])
zap.L().Info("cache misses for logs query", zap.Any("misses", misses))
missedSeries := make([]querycache.CachedSeriesData, 0)
filteredMissedSeries := make([]querycache.CachedSeriesData, 0)
for _, miss := range misses {
query, err = prepareLogsQuery(ctx, q.UseLogsNewSchema, miss.Start, miss.End, builderQuery, params, preferRPM)
if err != nil {
@@ -138,15 +139,32 @@ func (q *querier) runBuilderQuery(
}
return
}
filteredSeries, startTime, endTime := common.FilterSeriesPoints(series, miss.Start, miss.End, builderQuery.StepInterval)

// making sure that an empty range doesn't enter the cache
// an empty filteredSeries means data was filtered out, but an empty series means actually empty data
if len(filteredSeries) > 0 || len(series) == 0 {
filteredMissedSeries = append(filteredMissedSeries, querycache.CachedSeriesData{
Data: filteredSeries,
Start: startTime,
End: endTime,
})
}

// for the actual response
missedSeries = append(missedSeries, querycache.CachedSeriesData{
Data: series,
Start: miss.Start,
End: miss.End,
Data: series,
})
}
mergedSeries := q.queryCache.MergeWithCachedSeriesData(cacheKeys[queryName], missedSeries)

resultSeries := common.GetSeriesFromCachedData(mergedSeries, start, end)
filteredMergedSeries := q.queryCache.MergeWithCachedSeriesDataV2(cacheKeys[queryName], filteredMissedSeries)
q.queryCache.StoreSeriesInCache(cacheKeys[queryName], filteredMergedSeries)

mergedSeries := q.queryCache.MergeWithCachedSeriesDataV2(cacheKeys[queryName], missedSeries)

resultSeries := common.GetSeriesFromCachedDataV2(mergedSeries, start, end, builderQuery.StepInterval)

ch <- channelResult{
Err: nil,

@@ -119,9 +119,10 @@ func (q *querier) runBuilderQuery(
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: series}
return
}
misses := q.queryCache.FindMissingTimeRanges(start, end, builderQuery.StepInterval, cacheKeys[queryName])
misses := q.queryCache.FindMissingTimeRangesV2(start, end, builderQuery.StepInterval, cacheKeys[queryName])
zap.L().Info("cache misses for logs query", zap.Any("misses", misses))
missedSeries := make([]querycache.CachedSeriesData, 0)
filteredMissedSeries := make([]querycache.CachedSeriesData, 0)
for _, miss := range misses {
query, err = prepareLogsQuery(ctx, q.UseLogsNewSchema, miss.Start, miss.End, builderQuery, params, preferRPM)
if err != nil {
@@ -138,15 +139,33 @@ func (q *querier) runBuilderQuery(
}
return
}

filteredSeries, startTime, endTime := common.FilterSeriesPoints(series, miss.Start, miss.End, builderQuery.StepInterval)

// making sure that an empty range doesn't enter the cache
// an empty filteredSeries means data was filtered out, but an empty series means actually empty data
if len(filteredSeries) > 0 || len(series) == 0 {
filteredMissedSeries = append(filteredMissedSeries, querycache.CachedSeriesData{
Data: filteredSeries,
Start: startTime,
End: endTime,
})
}

// for the actual response
missedSeries = append(missedSeries, querycache.CachedSeriesData{
Data: series,
Start: miss.Start,
End: miss.End,
})
}
mergedSeries := q.queryCache.MergeWithCachedSeriesData(cacheKeys[queryName], missedSeries)

resultSeries := common.GetSeriesFromCachedData(mergedSeries, start, end)
filteredMergedSeries := q.queryCache.MergeWithCachedSeriesDataV2(cacheKeys[queryName], filteredMissedSeries)
q.queryCache.StoreSeriesInCache(cacheKeys[queryName], filteredMergedSeries)

mergedSeries := q.queryCache.MergeWithCachedSeriesDataV2(cacheKeys[queryName], missedSeries)

resultSeries := common.GetSeriesFromCachedDataV2(mergedSeries, start, end, builderQuery.StepInterval)

ch <- channelResult{
Err: nil,

@@ -3,6 +3,7 @@ package common
import (
"math"
"regexp"
"sort"
"time"
"unicode"

@@ -123,3 +124,108 @@ func GetSeriesFromCachedData(data []querycache.CachedSeriesData, start, end int6
}
return newSeries
}

// It is different from GetSeriesFromCachedData because it doesn't remove a point if it is >= (start - (start % (step*1000)))
func GetSeriesFromCachedDataV2(data []querycache.CachedSeriesData, start, end, step int64) []*v3.Series {
series := make(map[uint64]*v3.Series)

for _, cachedData := range data {
for _, data := range cachedData.Data {
h := labels.FromMap(data.Labels).Hash()

if _, ok := series[h]; !ok {
series[h] = &v3.Series{
Labels: data.Labels,
LabelsArray: data.LabelsArray,
Points: make([]v3.Point, 0),
}
}

for _, point := range data.Points {
if point.Timestamp >= (start-(start%(step*1000))) && point.Timestamp <= end {
series[h].Points = append(series[h].Points, point)
}
}
}
}

newSeries := make([]*v3.Series, 0, len(series))
for _, s := range series {
s.SortPoints()
s.RemoveDuplicatePoints()
newSeries = append(newSeries, s)
}
return newSeries
}
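
The comment above is the crux of the V2 variant: a point is kept if it falls at or after the start of the step window that contains `start`. A small sketch of that boundary check, with illustrative timestamps and a 1-hour step:

```go
package main

import "fmt"

func main() {
	// 01:30-03:00 UTC query range with a 1-hour step (values illustrative).
	var start, end, step int64 = 1609464600000, 1609470000000, 3600

	windowStart := start - (start % (step * 1000)) // aligned down to 01:00

	for _, ts := range []int64{1609459200000, 1609462800000, 1609466400000} {
		kept := ts >= windowStart && ts <= end
		fmt.Println(ts, kept) // 00:00 false, 01:00 true, 02:00 true
	}
}
```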

// filter series points for storing in cache
func FilterSeriesPoints(seriesList []*v3.Series, missStart, missEnd int64, stepInterval int64) ([]*v3.Series, int64, int64) {
filteredSeries := make([]*v3.Series, 0)
startTime := missStart
endTime := missEnd

stepMs := stepInterval * 1000

// return empty series if the interval is not complete
if missStart+stepMs > missEnd {
return []*v3.Series{}, missStart, missEnd
}

// if the end time is not a complete aggregation window, then we will have to adjust the end time
// to the previous complete aggregation window end
endCompleteWindow := missEnd%stepMs == 0
if !endCompleteWindow {
endTime = missEnd - (missEnd % stepMs)
}

// if the start time is not a complete aggregation window, then we will have to adjust the start time
// to the next complete aggregation window
if missStart%stepMs != 0 {
startTime = missStart + stepMs - (missStart % stepMs)
}

for _, series := range seriesList {
// if data for the series is empty, then we will add it to the cache
if len(series.Points) == 0 {
filteredSeries = append(filteredSeries, &v3.Series{
Labels: series.Labels,
LabelsArray: series.LabelsArray,
Points: make([]v3.Point, 0),
})
continue
}

// Sort the points based on timestamp
sort.Slice(series.Points, func(i, j int) bool {
return series.Points[i].Timestamp < series.Points[j].Timestamp
})

points := make([]v3.Point, len(series.Points))
copy(points, series.Points)

// Filter the first point that is not a complete aggregation window
if series.Points[0].Timestamp < missStart {
// Remove the first point
points = points[1:]
}

// filter the last point if it is not a complete aggregation window
// adding an or-condition to handle the case where the end time equals a complete window end https://github.com/SigNoz/signoz/pull/7212#issuecomment-2703677190
if (!endCompleteWindow && series.Points[len(series.Points)-1].Timestamp == missEnd-(missEnd%stepMs)) ||
(endCompleteWindow && series.Points[len(series.Points)-1].Timestamp == missEnd) {
// Remove the last point
points = points[:len(points)-1]
}

// making sure that empty range doesn't enter the cache
if len(points) > 0 {
filteredSeries = append(filteredSeries, &v3.Series{
Labels: series.Labels,
LabelsArray: series.LabelsArray,
Points: points,
})
}
}

return filteredSeries, startTime, endTime
}
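
The start/end alignment in FilterSeriesPoints can be verified by hand; this sketch repeats just the alignment arithmetic for an illustrative 01:30-03:30 UTC miss range with a one-hour step, matching what the tests below assert:

```go
package main

import "fmt"

func main() {
	// Illustrative miss range: 01:30-03:30 UTC, 1-hour step.
	var missStart, missEnd, stepInterval int64 = 1609464600000, 1609471800000, 3600
	stepMs := stepInterval * 1000

	startTime, endTime := missStart, missEnd
	if missEnd%stepMs != 0 { // trim the incomplete trailing window
		endTime = missEnd - (missEnd % stepMs)
	}
	if missStart%stepMs != 0 { // skip the incomplete leading window
		startTime = missStart + stepMs - (missStart % stepMs)
	}

	fmt.Println(startTime, endTime) // 1609466400000 (02:00), 1609470000000 (03:00)
}
```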

pkg/query-service/common/query_range_test.go (new file, 435 lines)
@@ -0,0 +1,435 @@
package common

import (
"testing"

v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/query-service/querycache"
)

func TestFilterSeriesPoints(t *testing.T) {
// Define test cases
testCases := []struct {
name string
seriesList []*v3.Series
missStart int64 // in milliseconds
missEnd int64 // in milliseconds
stepInterval int64 // in seconds
expectedPoints []*v3.Series
expectedStart int64 // in milliseconds
expectedEnd int64 // in milliseconds
}{
{
name: "Complete aggregation window",
missStart: 1609459200000, // 01 Jan 2021 00:00:00 UTC
missEnd: 1609466400000, // 01 Jan 2021 02:00:00 UTC
stepInterval: 3600, // 1 hour
seriesList: []*v3.Series{
{
Points: []v3.Point{
{Timestamp: 1609459200000, Value: 1.0}, // 01 Jan 2021 00:00:00 UTC
{Timestamp: 1609462800000, Value: 2.0}, // 01 Jan 2021 01:00:00 UTC
{Timestamp: 1609466400000, Value: 3.0}, // 01 Jan 2021 02:00:00 UTC
},
},
},
expectedPoints: []*v3.Series{
{
Points: []v3.Point{
{Timestamp: 1609459200000, Value: 1.0},
{Timestamp: 1609462800000, Value: 2.0},
},
},
},
expectedStart: 1609459200000,
expectedEnd: 1609466400000,
},
{
name: "Filter first point",
missStart: 1609464600000, // 01 Jan 2021 01:30:00 UTC
missEnd: 1609470000000, // 01 Jan 2021 03:00:00 UTC
stepInterval: 3600, // 1 hour
seriesList: []*v3.Series{
{
Points: []v3.Point{
{Timestamp: 1609462800000, Value: 2.0}, // 01 Jan 2021 01:00:00 UTC
{Timestamp: 1609466400000, Value: 3.0}, // 01 Jan 2021 02:00:00 UTC
},
},
},
expectedPoints: []*v3.Series{
{
Points: []v3.Point{
{Timestamp: 1609466400000, Value: 3.0},
},
},
},
expectedStart: 1609466400000,
expectedEnd: 1609470000000,
},
{
name: "Filter last point",
missStart: 1609466400000, // 01 Jan 2021 02:00:00 UTC
missEnd: 1609471800000, // 01 Jan 2021 03:30:00 UTC
stepInterval: 3600, // 1 hour
seriesList: []*v3.Series{
{
Points: []v3.Point{
{Timestamp: 1609466400000, Value: 3.0}, // 01 Jan 2021 02:00:00 UTC
{Timestamp: 1609470000000, Value: 3.0}, // 01 Jan 2021 03:00:00 UTC
},
},
},
expectedPoints: []*v3.Series{
{
Points: []v3.Point{
{Timestamp: 1609466400000, Value: 3.0},
},
},
},
expectedStart: 1609466400000,
expectedEnd: 1609470000000,
},
{
name: "Incomplete aggregation window",
missStart: 1609470000000, // 01 Jan 2021 03:00:00 UTC
missEnd: 1609471800000, // 01 Jan 2021 03:30:00 UTC
stepInterval: 3600, // 1 hour
seriesList: []*v3.Series{
{
Points: []v3.Point{},
},
},
expectedPoints: []*v3.Series{},
expectedStart: 1609470000000,
expectedEnd: 1609471800000,
},
{
name: "Filter first point with multiple series",
missStart: 1609464600000, // 01 Jan 2021 01:30:00 UTC
missEnd: 1609477200000, // 01 Jan 2021 05:00:00 UTC
stepInterval: 3600, // 1 hour
seriesList: []*v3.Series{
{
Points: []v3.Point{
{Timestamp: 1609462800000, Value: 2.0}, // 01 Jan 2021 01:00:00 UTC
{Timestamp: 1609466400000, Value: 3.0}, // 01 Jan 2021 02:00:00 UTC
{Timestamp: 1609470000000, Value: 4.0}, // 01 Jan 2021 03:00:00 UTC
{Timestamp: 1609473600000, Value: 5.0}, // 01 Jan 2021 04:00:00 UTC
},
},
{
Points: []v3.Point{
{Timestamp: 1609466400000, Value: 6.0}, // 01 Jan 2021 02:00:00 UTC
{Timestamp: 1609470000000, Value: 7.0}, // 01 Jan 2021 03:00:00 UTC
{Timestamp: 1609473600000, Value: 8.0}, // 01 Jan 2021 04:00:00 UTC
},
},
{
Points: []v3.Point{
{Timestamp: 1609466400000, Value: 9.0}, // 01 Jan 2021 02:00:00 UTC
{Timestamp: 1609470000000, Value: 10.0}, // 01 Jan 2021 03:00:00 UTC
{Timestamp: 1609473600000, Value: 11.0}, // 01 Jan 2021 04:00:00 UTC
},
},
},
expectedPoints: []*v3.Series{
{
Points: []v3.Point{
{Timestamp: 1609466400000, Value: 3.0}, // 01 Jan 2021 02:00:00 UTC
{Timestamp: 1609470000000, Value: 4.0}, // 01 Jan 2021 03:00:00 UTC
{Timestamp: 1609473600000, Value: 5.0}, // 01 Jan 2021 04:00:00 UTC
},
},
{
Points: []v3.Point{
{Timestamp: 1609466400000, Value: 6.0}, // 01 Jan 2021 02:00:00 UTC
{Timestamp: 1609470000000, Value: 7.0}, // 01 Jan 2021 03:00:00 UTC
{Timestamp: 1609473600000, Value: 8.0}, // 01 Jan 2021 04:00:00 UTC
},
},
{
Points: []v3.Point{
{Timestamp: 1609466400000, Value: 9.0}, // 01 Jan 2021 02:00:00 UTC
{Timestamp: 1609470000000, Value: 10.0}, // 01 Jan 2021 03:00:00 UTC
{Timestamp: 1609473600000, Value: 11.0}, // 01 Jan 2021 04:00:00 UTC
},
},
},
expectedStart: 1609466400000,
expectedEnd: 1609477200000,
},
{
name: "Filter last point",
missStart: 1609466400000, // 01 Jan 2021 02:00:00 UTC
missEnd: 1609475400000, // 01 Jan 2021 04:30:00 UTC
stepInterval: 3600, // 1 hour
seriesList: []*v3.Series{
{
Points: []v3.Point{
{Timestamp: 1609466400000, Value: 3.0}, // 01 Jan 2021 02:00:00 UTC
{Timestamp: 1609470000000, Value: 4.0}, // 01 Jan 2021 03:00:00 UTC
{Timestamp: 1609473600000, Value: 5.0}, // 01 Jan 2021 04:00:00 UTC
},
},
{
Points: []v3.Point{
{Timestamp: 1609466400000, Value: 6.0}, // 01 Jan 2021 02:00:00 UTC
},
},
{
Points: []v3.Point{
{Timestamp: 1609466400000, Value: 9.0}, // 01 Jan 2021 02:00:00 UTC
{Timestamp: 1609470000000, Value: 10.0}, // 01 Jan 2021 03:00:00 UTC
},
},
},
expectedPoints: []*v3.Series{
{
Points: []v3.Point{
{Timestamp: 1609466400000, Value: 3.0}, // 01 Jan 2021 02:00:00 UTC
{Timestamp: 1609470000000, Value: 4.0}, // 01 Jan 2021 03:00:00 UTC
},
},
{
Points: []v3.Point{
{Timestamp: 1609466400000, Value: 6.0}, // 01 Jan 2021 02:00:00 UTC
},
},
{
Points: []v3.Point{
{Timestamp: 1609466400000, Value: 9.0}, // 01 Jan 2021 02:00:00 UTC
{Timestamp: 1609470000000, Value: 10.0}, // 01 Jan 2021 03:00:00 UTC
},
},
},
expectedStart: 1609466400000,
expectedEnd: 1609473600000,
},
{
name: "half range should return empty result",
missStart: 1609473600000, // 01 Jan 2021 04:00:00 UTC
missEnd: 1609475400000, // 01 Jan 2021 04:30:00 UTC
stepInterval: 3600, // 1 hour
seriesList: []*v3.Series{
{
Points: []v3.Point{
{Timestamp: 1609473600000, Value: 1.0}, // 01 Jan 2021 04:00:00 UTC
},
},
},
expectedPoints: []*v3.Series{},
expectedStart: 1609473600000,
expectedEnd: 1609475400000,
},
{
name: "respect actual empty series",
missStart: 1609466400000, // 01 Jan 2021 02:00:00 UTC
missEnd: 1609475400000, // 01 Jan 2021 04:30:00 UTC
stepInterval: 3600, // 1 hour
seriesList: []*v3.Series{
{
Points: []v3.Point{},
},
},
expectedPoints: []*v3.Series{
{
Points: []v3.Point{},
},
},
expectedStart: 1609466400000,
expectedEnd: 1609473600000,
},
{
name: "Remove point that is not a complete aggregation window",
missStart: 1609466400000, // 01 Jan 2021 02:00:00 UTC
missEnd: 1609470000000, // 01 Jan 2021 03:00:00 UTC
stepInterval: 3600, // 1 hour
seriesList: []*v3.Series{
{
Points: []v3.Point{
{Timestamp: 1609466400000, Value: 2.0}, // 01 Jan 2021 02:00:00 UTC
{Timestamp: 1609470000000, Value: 3.0}, // 01 Jan 2021 03:00:00 UTC
},
},
},
expectedPoints: []*v3.Series{
{
Points: []v3.Point{
{Timestamp: 1609466400000, Value: 2.0},
},
},
},
expectedStart: 1609466400000,
expectedEnd: 1609470000000,
},
}

for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
filteredSeries, startTime, endTime := FilterSeriesPoints(tc.seriesList, tc.missStart, tc.missEnd, tc.stepInterval)

if len(tc.expectedPoints) != len(filteredSeries) {
t.Errorf("Expected %d series, got %d", len(tc.expectedPoints), len(filteredSeries))
return
}

for i := range tc.expectedPoints {
if len(tc.expectedPoints[i].Points) != len(filteredSeries[i].Points) {
t.Errorf("Series %d: Expected %d points, got %d\nExpected points: %+v\nGot points: %+v",
i,
len(tc.expectedPoints[i].Points),
len(filteredSeries[i].Points),
tc.expectedPoints[i].Points,
filteredSeries[i].Points)
continue
}

for j := range tc.expectedPoints[i].Points {
if tc.expectedPoints[i].Points[j].Timestamp != filteredSeries[i].Points[j].Timestamp {
t.Errorf("Series %d Point %d: Expected timestamp %d, got %d", i, j, tc.expectedPoints[i].Points[j].Timestamp, filteredSeries[i].Points[j].Timestamp)
}
if tc.expectedPoints[i].Points[j].Value != filteredSeries[i].Points[j].Value {
t.Errorf("Series %d Point %d: Expected value %f, got %f", i, j, tc.expectedPoints[i].Points[j].Value, filteredSeries[i].Points[j].Value)
}
}
}

if tc.expectedStart != startTime {
t.Errorf("Expected start time %d, got %d", tc.expectedStart, startTime)
}
if tc.expectedEnd != endTime {
t.Errorf("Expected end time %d, got %d", tc.expectedEnd, endTime)
}
})
}
}

func TestGetSeriesFromCachedData(t *testing.T) {
testCases := []struct {
name string
data []querycache.CachedSeriesData
start int64
end int64
expectedCount int
expectedPoints int
}{
{
name: "Single point outside range",
data: []querycache.CachedSeriesData{
{
Data: []*v3.Series{
{
Labels: map[string]string{"label1": "value1"},
Points: []v3.Point{
{Timestamp: 1609473600000, Value: 1.0},
},
},
},
},
},
start: 1609475400000, // 01 Jan 2021 04:30:00 UTC
end: 1609477200000, // 01 Jan 2021 05:00:00 UTC
expectedCount: 1,
expectedPoints: 0,
},
{
name: "Single point inside range",
data: []querycache.CachedSeriesData{
{
Data: []*v3.Series{
{
Labels: map[string]string{"label1": "value1"},
Points: []v3.Point{
{Timestamp: 1609476000000, Value: 1.0},
},
},
},
},
},
start: 1609475400000, // 01 Jan 2021 04:30:00 UTC
end: 1609477200000, // 01 Jan 2021 05:00:00 UTC
expectedCount: 1,
expectedPoints: 1,
},
}

for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
series := GetSeriesFromCachedData(tc.data, tc.start, tc.end)

if len(series) != tc.expectedCount {
t.Errorf("Expected %d series, got %d", tc.expectedCount, len(series))
}
if len(series[0].Points) != tc.expectedPoints {
t.Errorf("Expected %d points, got %d", tc.expectedPoints, len(series[0].Points))
}
})
}
}

func TestGetSeriesFromCachedDataV2(t *testing.T) {
testCases := []struct {
name string
data []querycache.CachedSeriesData
start int64
end int64
step int64
expectedCount int
expectedPoints int
}{
{
name: "Single point outside range",
data: []querycache.CachedSeriesData{
{
Data: []*v3.Series{
{
Labels: map[string]string{"label1": "value1"},
Points: []v3.Point{
{Timestamp: 1609473600000, Value: 1.0},
},
},
},
},
},
start: 1609475400000,
end: 1609477200000,
step: 1000,
expectedCount: 1,
expectedPoints: 0,
},
{
name: "Single point inside range",
data: []querycache.CachedSeriesData{
{
Data: []*v3.Series{
{
Labels: map[string]string{"label1": "value1"},
Points: []v3.Point{
{Timestamp: 1609476000000, Value: 1.0},
},
},
},
},
},
start: 1609475400000,
end: 1609477200000,
step: 1000,
expectedCount: 1,
expectedPoints: 1,
},
}

for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
series := GetSeriesFromCachedDataV2(tc.data, tc.start, tc.end, tc.step)

if len(series) != tc.expectedCount {
t.Errorf("Expected %d series, got %d", tc.expectedCount, len(series))
}
if len(series[0].Points) != tc.expectedPoints {
t.Errorf("Expected %d points, got %d", tc.expectedPoints, len(series[0].Points))
}
})
}
}
@@ -149,4 +149,6 @@ type QueryCache interface {
FindMissingTimeRanges(start, end int64, step int64, cacheKey string) []querycache.MissInterval
FindMissingTimeRangesV2(start, end int64, step int64, cacheKey string) []querycache.MissInterval
MergeWithCachedSeriesData(cacheKey string, newData []querycache.CachedSeriesData) []querycache.CachedSeriesData
StoreSeriesInCache(cacheKey string, series []querycache.CachedSeriesData)
MergeWithCachedSeriesDataV2(cacheKey string, series []querycache.CachedSeriesData) []querycache.CachedSeriesData
}

@@ -50,21 +50,7 @@ type GetTopOperationsParams struct {
Limit int `json:"limit"`
}

type EventType string

const (
TrackEvent EventType = "track"
IdentifyEvent EventType = "identify"
GroupEvent EventType = "group"
)

// IsValid checks if the EventType is one of the valid values
func (e EventType) IsValid() bool {
return e == TrackEvent || e == IdentifyEvent || e == GroupEvent
}
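
For reference, a minimal sketch of how the IsValid guard above interacts with the check removed from parseRegisterEventRequest earlier in this compare; the inputs are illustrative.

```go
package main

import (
	"errors"
	"fmt"
)

type EventType string

const (
	TrackEvent    EventType = "track"
	IdentifyEvent EventType = "identify"
	GroupEvent    EventType = "group"
)

// IsValid mirrors the guard in the hunk above.
func (e EventType) IsValid() bool {
	return e == TrackEvent || e == IdentifyEvent || e == GroupEvent
}

// validate mirrors the request-parsing check that used this guard.
func validate(et EventType) error {
	if !et.IsValid() {
		return errors.New("eventType param missing/incorrect in query")
	}
	return nil
}

func main() {
	fmt.Println(validate("identify")) // <nil>
	fmt.Println(validate("pageview")) // eventType param missing/incorrect in query
}
```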
|
||||
|
||||
type RegisterEventParams struct {
|
||||
EventType EventType `json:"eventType"`
|
||||
EventName string `json:"eventName"`
|
||||
Attributes map[string]interface{} `json:"attributes"`
|
||||
RateLimited bool `json:"rateLimited"`
|
||||
|
||||
@@ -297,8 +297,6 @@ func (q AttributeKeyDataType) String() string {
// for a selected aggregate operator, aggregate attribute, filter attribute key
// and search text.
type FilterAttributeValueRequest struct {
	StartTimeMillis    int64             `json:"startTimeMillis"`
	EndTimeMillis      int64             `json:"endTimeMillis"`
	DataSource         DataSource        `json:"dataSource"`
	AggregateOperator  AggregateOperator `json:"aggregateOperator"`
	AggregateAttribute string            `json:"aggregateAttribute"`
@@ -307,51 +305,6 @@ type FilterAttributeValueRequest struct {
	TagType             TagType      `json:"tagType"`
	SearchText          string       `json:"searchText"`
	Limit               int          `json:"limit"`
	ExistingFilterItems []FilterItem `json:"existingFilterItems"`
	MetricNames         []string     `json:"metricNames"`
	IncludeRelated      bool         `json:"includeRelated"`
}

func (f *FilterAttributeValueRequest) Validate() error {
	if f.FilterAttributeKey == "" {
		return fmt.Errorf("filterAttributeKey is required")
	}

	if f.StartTimeMillis == 0 {
		return fmt.Errorf("startTimeMillis is required")
	}

	if f.EndTimeMillis == 0 {
		return fmt.Errorf("endTimeMillis is required")
	}

	if f.Limit == 0 {
		f.Limit = 100
	}

	if f.Limit > 1000 {
		return fmt.Errorf("limit must be less than 1000")
	}

	if f.ExistingFilterItems != nil {
		for _, value := range f.ExistingFilterItems {
			if value.Key.Key == "" {
				return fmt.Errorf("existingFilterItems must contain a valid key")
			}
		}
	}

	if err := f.DataSource.Validate(); err != nil {
		return fmt.Errorf("invalid data source: %w", err)
	}

	if f.DataSource != DataSourceMetrics {
		if err := f.AggregateOperator.Validate(); err != nil {
			return fmt.Errorf("invalid aggregate operator: %w", err)
		}
	}

	return nil
}
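Two behaviours of Validate are easy to miss: a zero Limit is defaulted to 100 in place rather than rejected, and the aggregate operator is only checked for non-metrics data sources. A short usage sketch; the field values are illustrative, and FilterAttributeKey and DataSourceMetrics are taken from Validate's own references above:

// Sketch: build and validate a request; Limit is deliberately left at zero.
func buildRequest() (*FilterAttributeValueRequest, error) {
	req := &FilterAttributeValueRequest{
		StartTimeMillis:    1609475400000,
		EndTimeMillis:      1609477200000,
		DataSource:         DataSourceMetrics,
		FilterAttributeKey: "service.name",
		IncludeRelated:     true,
	}
	if err := req.Validate(); err != nil {
		return nil, err // e.g. a missing key or a limit above 1000
	}
	// Validate mutated the request in place: req.Limit is now 100.
	return req, nil
}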
type AggregateAttributeResponse struct {
@@ -413,10 +366,9 @@ func (a AttributeKey) Validate() error {
}

type FilterAttributeValueResponse struct {
	StringAttributeValues []string                      `json:"stringAttributeValues"`
	NumberAttributeValues []interface{}                 `json:"numberAttributeValues"`
	BoolAttributeValues   []bool                        `json:"boolAttributeValues"`
	RelatedValues         *FilterAttributeValueResponse `json:"relatedValues,omitempty"`
	StringAttributeValues []string      `json:"stringAttributeValues"`
	NumberAttributeValues []interface{} `json:"numberAttributeValues"`
	BoolAttributeValues   []bool        `json:"boolAttributeValues"`
}

type QueryRangeParamsV3 struct {
@@ -264,6 +264,9 @@ func (q *queryCache) mergeSeries(cachedSeries, missedSeries []*v3.Series) []*v3.
}

func (q *queryCache) storeMergedData(cacheKey string, mergedData []CachedSeriesData) {
	if q.cache == nil {
		return
	}
	mergedDataJSON, err := json.Marshal(mergedData)
	if err != nil {
		zap.L().Error("error marshalling merged data", zap.Error(err))
@@ -275,8 +278,7 @@ func (q *queryCache) storeMergedData(cacheKey string, mergedData []CachedSeriesD
	}
}

func (q *queryCache) MergeWithCachedSeriesData(cacheKey string, newData []CachedSeriesData) []CachedSeriesData {
func (q *queryCache) MergeWithCachedSeriesDataV2(cacheKey string, newData []CachedSeriesData) []CachedSeriesData {
	if q.cache == nil {
		return newData
	}
@@ -284,8 +286,7 @@ func (q *queryCache) MergeWithCachedSeriesData(cacheKey string, newData []Cached
	cachedData, _, _ := q.cache.Retrieve(cacheKey, true)
	var existingData []CachedSeriesData
	if err := json.Unmarshal(cachedData, &existingData); err != nil {
		// In case of error, we return the entire range as a miss
		q.storeMergedData(cacheKey, newData)
		zap.L().Error("error unmarshalling existing data", zap.Error(err))
		return newData
	}
@@ -330,7 +331,16 @@ func (q *queryCache) MergeWithCachedSeriesData(cacheKey string, newData []Cached
		mergedData = append(mergedData, *current)
	}

	q.storeMergedData(cacheKey, mergedData)

	return mergedData
}

func (q *queryCache) MergeWithCachedSeriesData(cacheKey string, newData []CachedSeriesData) []CachedSeriesData {
	mergedData := q.MergeWithCachedSeriesDataV2(cacheKey, newData)
	q.storeMergedData(cacheKey, mergedData)
	return mergedData
}

func (q *queryCache) StoreSeriesInCache(cacheKey string, series []CachedSeriesData) {
	q.storeMergedData(cacheKey, series)
}
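As this hunk suggests, the refactor separates merging from persistence: the store call moves out of the shared path, MergeWithCachedSeriesDataV2 computes the merged slice, and the original MergeWithCachedSeriesData becomes a thin wrapper that also writes the result back. In caller terms the two forms below should be equivalent (a sketch, not code from the diff):

// Explicit two-step form: merge first, persist when ready.
merged := q.MergeWithCachedSeriesDataV2(cacheKey, newData)
q.StoreSeriesInCache(cacheKey, merged)

// One-shot form: the V1 wrapper performs both steps internally.
merged = q.MergeWithCachedSeriesData(cacheKey, newData)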
@@ -409,24 +409,7 @@ func createTelemetry() {
			telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, dashboardsAlertsData, user.Email, false, false)
		}
	}
	telemetry.SendIdentifyEvent(map[string]interface{}{
		"total_logs":                  totalLogs,
		"total_traces":                totalSpans,
		"total_metrics":               totalSamples,
		"total_users":                 userCount,
		"total_channels":              alertsInfo.TotalChannels,
		"total_dashboards_with_panel": dashboardsInfo.TotalDashboardsWithPanelAndName,
		"total_saved_views":           savedViewsInfo.TotalSavedViews,
		"total_active_alerts":         alertsInfo.TotalActiveAlerts,
		"total_traces_based_alerts":   alertsInfo.TracesBasedAlerts,
		"total_logs_based_alerts":     alertsInfo.LogsBasedAlerts,
		"total_metric_based_alerts":   alertsInfo.MetricBasedAlerts,
		"total_anomaly_based_alerts":  alertsInfo.AnomalyBasedAlerts,
		"total_metrics_based_panels":  dashboardsInfo.MetricBasedPanels,
		"total_logs_based_panels":     dashboardsInfo.LogsBasedPanels,
		"total_traces_based_panels":   dashboardsInfo.TracesBasedPanels,
	})
	telemetry.SendGroupEvent(map[string]interface{}{
	telemetry.SendIdentityEvent(map[string]interface{}{
		"total_logs":    totalLogs,
		"total_traces":  totalSpans,
		"total_metrics": totalSamples,
@@ -451,16 +434,13 @@ func createTelemetry() {
	}

	if totalLogs > 0 {
		telemetry.SendIdentifyEvent(map[string]interface{}{"sent_logs": true})
		telemetry.SendGroupEvent(map[string]interface{}{"sent_logs": true})
		telemetry.SendIdentityEvent(map[string]interface{}{"sent_logs": true})
	}
	if totalSpans > 0 {
		telemetry.SendIdentifyEvent(map[string]interface{}{"sent_traces": true})
		telemetry.SendGroupEvent(map[string]interface{}{"sent_traces": true})
		telemetry.SendIdentityEvent(map[string]interface{}{"sent_traces": true})
	}
	if totalSamples > 0 {
		telemetry.SendIdentifyEvent(map[string]interface{}{"sent_metrics": true})
		telemetry.SendGroupEvent(map[string]interface{}{"sent_metrics": true})
		telemetry.SendIdentityEvent(map[string]interface{}{"sent_metrics": true})
	}

	getDistributedInfoInLastHeartBeatInterval, _ := telemetry.reader.GetDistributedInfoInLastHeartBeatInterval(ctx)
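Across these createTelemetry hunks the pattern is uniform: every SendIdentifyEvent/SendGroupEvent pair collapses into a single SendIdentityEvent call. The telemetry hunk that follows shows why that is safe: the renamed method enqueues both the analytics.Identify and the analytics.Group message itself. Schematically:

// Before: two calls per heartbeat flag.
telemetry.SendIdentifyEvent(map[string]interface{}{"sent_logs": true})
telemetry.SendGroupEvent(map[string]interface{}{"sent_logs": true})

// After: one call; SendIdentityEvent enqueues the Identify and Group
// messages internally (see the hunk below).
telemetry.SendIdentityEvent(map[string]interface{}{"sent_logs": true})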
@@ -591,7 +571,7 @@ func (a *Telemetry) IdentifyUser(user *types.User) {
	}
}

func (a *Telemetry) SendIdentifyEvent(data map[string]interface{}) {
func (a *Telemetry) SendIdentityEvent(data map[string]interface{}) {

	if !a.isTelemetryEnabled() || a.isTelemetryAnonymous() {
		return
@@ -602,37 +582,23 @@ func (a *Telemetry) SendIdentifyEvent(data map[string]interface{}) {
		traits.Set(k, v)
	}
	if a.saasOperator != nil {
		a.saasOperator.Enqueue(analytics.Identify{
			UserId: a.GetUserEmail(),
			Traits: traits,
		})
		a.saasOperator.Enqueue(analytics.Group{
			UserId:  a.userEmail,
			GroupId: a.getCompanyDomain(),
			Traits:  traits,
		})
	}
	if a.ossOperator != nil {
		a.ossOperator.Enqueue(analytics.Identify{
			UserId: a.ipAddress,
			Traits: traits,
		})
	}
}

func (a *Telemetry) SendGroupEvent(data map[string]interface{}) {

	if !a.isTelemetryEnabled() || a.isTelemetryAnonymous() {
		return
	}
	traits := analytics.NewTraits()

	for k, v := range data {
		traits.Set(k, v)
	}
	if a.saasOperator != nil {
		a.saasOperator.Enqueue(analytics.Group{
			UserId:  a.GetUserEmail(),
			GroupId: a.getCompanyDomain(),
			Traits:  traits,
		})
	}
	if a.ossOperator != nil {
		// Updating a group's properties
		a.ossOperator.Enqueue(analytics.Group{
			UserId:  a.ipAddress,
			GroupId: a.getCompanyDomain(),