Compare commits


2 Commits

Author         SHA1        Message                                                                                          Date
Vishal Sharma  8096dbc7af  Merge branch 'develop' into fix/precheck-login                                                   2023-12-13 16:21:51 +05:30
makeavish      6900b2a998  fix: return isUser false in case precheck login is not able to create user when sso is enabled  2023-11-27 19:40:44 +05:30
78 changed files with 374 additions and 1747 deletions

.github/CODEOWNERS vendored
View File

@@ -8,4 +8,8 @@
/frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
/deploy/ @prashant-shahi
/sample-apps/ @prashant-shahi
**/query-service/ @srikanthccv
Makefile @srikanthccv
go.* @srikanthccv
.git* @srikanthccv
.github @prashant-shahi

View File

@@ -1,32 +0,0 @@
name: Code Coverage
on:
push:
branches:
- develop
- main
- release/v*
pull_request:
branches:
- develop
- main
- release/v*
jobs:
coverage:
runs-on: ubuntu-latest
permissions:
checks: write
pull-requests: write
contents: write
steps:
- name: Checkout Repository
uses: actions/checkout@v2
- uses: jwalton/gh-find-current-pr@v1
id: findPr
- uses: ArtiomTr/jest-coverage-report-action@v2
with:
package-manager: yarn
working-directory: frontend
test-script: yarn jest:coverage
github-token: ${{ secrets.GITHUB_TOKEN }}
output: comment
prnumber: ${{ steps.findPr.outputs.number }}

View File

@@ -1,7 +1,7 @@
version: "3.9"
x-clickhouse-defaults: &clickhouse-defaults
image: clickhouse/clickhouse-server:23.11.1-alpine
image: clickhouse/clickhouse-server:23.7.3-alpine
tty: true
deploy:
restart_policy:
@@ -146,7 +146,7 @@ services:
condition: on-failure
query-service:
image: signoz/query-service:0.35.1
image: signoz/query-service:0.35.0
command:
[
"-config=/root/config/prometheus.yml",
@@ -186,7 +186,7 @@ services:
<<: *db-depend
frontend:
image: signoz/frontend:0.35.1
image: signoz/frontend:0.35.0
deploy:
restart_policy:
condition: on-failure
@@ -199,7 +199,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector:
image: signoz/signoz-otel-collector:0.88.3
image: signoz/signoz-otel-collector:0.88.1
command:
[
"--config=/etc/otel-collector-config.yaml",
@@ -237,7 +237,7 @@ services:
- query-service
otel-collector-migrator:
image: signoz/signoz-schema-migrator:0.88.3
image: signoz/signoz-schema-migrator:0.88.1
deploy:
restart_policy:
condition: on-failure
@@ -250,7 +250,7 @@ services:
# - clickhouse-3
otel-collector-metrics:
image: signoz/signoz-otel-collector:0.88.3
image: signoz/signoz-otel-collector:0.88.1
command:
[
"--config=/etc/otel-collector-metrics-config.yaml",

View File

@@ -66,7 +66,7 @@ services:
- --storage.path=/data
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.3}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.1}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -81,7 +81,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
otel-collector:
container_name: signoz-otel-collector
image: signoz/signoz-otel-collector:0.88.3
image: signoz/signoz-otel-collector:0.88.1
command:
[
"--config=/etc/otel-collector-config.yaml",
@@ -118,7 +118,7 @@ services:
otel-collector-metrics:
container_name: signoz-otel-collector-metrics
image: signoz/signoz-otel-collector:0.88.3
image: signoz/signoz-otel-collector:0.88.1
command:
[
"--config=/etc/otel-collector-metrics-config.yaml",

View File

@@ -3,7 +3,7 @@ version: "2.4"
x-clickhouse-defaults: &clickhouse-defaults
restart: on-failure
# addding non LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
image: clickhouse/clickhouse-server:23.11.1-alpine
image: clickhouse/clickhouse-server:23.7.3-alpine
tty: true
depends_on:
- zookeeper-1
@@ -164,7 +164,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
query-service:
image: signoz/query-service:${DOCKER_TAG:-0.35.1}
image: signoz/query-service:${DOCKER_TAG:-0.35.0}
container_name: signoz-query-service
command:
[
@@ -203,7 +203,7 @@ services:
<<: *db-depend
frontend:
image: signoz/frontend:${DOCKER_TAG:-0.35.1}
image: signoz/frontend:${DOCKER_TAG:-0.35.0}
container_name: signoz-frontend
restart: on-failure
depends_on:
@@ -215,7 +215,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.3}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.1}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -229,7 +229,7 @@ services:
otel-collector:
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.3}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.1}
container_name: signoz-otel-collector
command:
[
@@ -269,7 +269,7 @@ services:
condition: service_healthy
otel-collector-metrics:
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.3}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.1}
container_name: signoz-otel-collector-metrics
command:
[

View File

@@ -185,6 +185,7 @@ func (ah *APIHandler) precheckLogin(w http.ResponseWriter, r *http.Request) {
resp, apierr := ah.AppDao().PrecheckLogin(ctx, email, sourceUrl)
if apierr != nil {
RespondError(w, apierr, resp)
return
}
ah.Respond(w, resp)
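
The single line at stake in this hunk is the early return after RespondError: without it the handler falls through and ah.Respond writes a success body on top of the error response. A minimal sketch of that guard in a plain net/http handler — the helper and endpoint names below are stand-ins, not the actual SigNoz code:

package main

import (
	"context"
	"encoding/json"
	"errors"
	"net/http"
)

// respondError and respond are illustrative helpers, not the SigNoz ones.
func respondError(w http.ResponseWriter, err error, data interface{}) {
	w.WriteHeader(http.StatusInternalServerError)
	json.NewEncoder(w).Encode(map[string]interface{}{"error": err.Error(), "data": data})
}

func respond(w http.ResponseWriter, v interface{}) {
	json.NewEncoder(w).Encode(v)
}

// precheck stands in for ah.AppDao().PrecheckLogin.
func precheck(ctx context.Context) (map[string]bool, error) {
	return nil, errors.New("precheck failed")
}

func precheckHandler(w http.ResponseWriter, r *http.Request) {
	resp, err := precheck(r.Context())
	if err != nil {
		respondError(w, err, resp)
		return // without this return, respond below still runs and appends a second body
	}
	respond(w, resp)
}

func main() {
	http.HandleFunc("/api/v1/loginPrecheck", precheckHandler)
	http.ListenAndServe(":8080", nil)
}

The second write is not an error at the net/http level, which is why a missing return like this tends to surface as concatenated JSON on the client rather than a server-side failure.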

View File

@@ -12,7 +12,6 @@ import (
"github.com/gorilla/mux"
"go.signoz.io/signoz/ee/query-service/model"
"go.signoz.io/signoz/pkg/query-service/auth"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
@@ -48,18 +47,8 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
req.CreatedAt = time.Now().Unix()
req.Token = generatePATToken()
// default expiry is 30 days
if req.ExpiresAt == 0 {
req.ExpiresAt = time.Now().AddDate(0, 0, 30).Unix()
}
// max expiry is 1 year
if req.ExpiresAt > time.Now().AddDate(1, 0, 0).Unix() {
req.ExpiresAt = time.Now().AddDate(1, 0, 0).Unix()
}
zap.S().Debugf("Got PAT request: %+v", req)
var apierr basemodel.BaseApiError
if req, apierr = ah.AppDao().CreatePAT(ctx, req); apierr != nil {
if apierr := ah.AppDao().CreatePAT(ctx, &req); apierr != nil {
RespondError(w, apierr, nil)
return
}
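
The expiry handling that differs between the two sides of this hunk is a clamp over req.ExpiresAt: a zero value defaults to 30 days out, and anything beyond one year from now is capped at one year. Pulled out as a standalone helper for illustration — the name clampExpiry is made up and the handler wiring is omitted:

package main

import (
	"fmt"
	"time"
)

// clampExpiry applies the policy shown in the hunk: zero defaults to 30 days
// from now, and anything later than one year from now is capped at one year.
func clampExpiry(expiresAt int64, now time.Time) int64 {
	if expiresAt == 0 {
		return now.AddDate(0, 0, 30).Unix()
	}
	if cap := now.AddDate(1, 0, 0).Unix(); expiresAt > cap {
		return cap
	}
	return expiresAt
}

func main() {
	now := time.Now()
	fmt.Println(clampExpiry(0, now))                           // defaults to 30 days out
	fmt.Println(clampExpiry(now.AddDate(2, 0, 0).Unix(), now)) // capped at one year
	fmt.Println(clampExpiry(now.AddDate(0, 0, 7).Unix(), now)) // kept as-is
}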

View File

@@ -480,7 +480,7 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
}
}
if _, ok := telemetry.EnabledPaths()[path]; ok {
if _, ok := telemetry.IgnoredPaths()[path]; !ok {
userEmail, err := auth.GetEmailFromJwt(r.Context())
if err == nil {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data, userEmail)
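
The two guard lines in this hunk are the same check with opposite polarity: one sends the telemetry event only when the path appears in an allowlist (EnabledPaths), the other sends it for every path except those in a denylist (IgnoredPaths). A toy comparison of the two shapes — the map contents below are made up:

package main

import "fmt"

func main() {
	enabled := map[string]struct{}{"/api/v1/login": {}}
	ignored := map[string]struct{}{"/api/v1/health": {}}

	path := "/api/v1/dashboards"

	// allowlist: act only when the path is listed
	if _, ok := enabled[path]; ok {
		fmt.Println("send event (allowlist)")
	}

	// denylist: act for everything except the listed paths
	if _, ok := ignored[path]; !ok {
		fmt.Println("send event (denylist)")
	}
}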

View File

@@ -33,7 +33,7 @@ type ModelDao interface {
DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError
GetDomainByEmail(ctx context.Context, email string) (*model.OrgDomain, basemodel.BaseApiError)
CreatePAT(ctx context.Context, p model.PAT) (model.PAT, basemodel.BaseApiError)
CreatePAT(ctx context.Context, p *model.PAT) basemodel.BaseApiError
GetPAT(ctx context.Context, pat string) (*model.PAT, basemodel.BaseApiError)
GetPATByID(ctx context.Context, id string) (*model.PAT, basemodel.BaseApiError)
GetUserByPAT(ctx context.Context, token string) (*basemodel.UserPayload, basemodel.BaseApiError)

View File

@@ -150,8 +150,6 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
if ssoAvailable {
resp.IsUser = true
// find domain from email
orgDomain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
@@ -180,7 +178,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
return resp, model.InternalError(fmt.Errorf("failed to generate login request"))
}
// build Idp URL that will authenticat the user
// build Idp URL that will authenticate the user
// the front-end will redirect user to this url
resp.SsoUrl, err = orgDomain.BuildSsoUrl(siteUrl)
@@ -189,6 +187,8 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
return resp, model.InternalError(err)
}
// set IsUser to true, as the user is valid
resp.IsUser = true
// set SSO to true, as the url is generated correctly
resp.SSO = true
}
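
This hunk carries the fix named in the second commit: on one side resp.IsUser is set to true as soon as ssoAvailable is known, on the other it is deferred until the SSO login URL has actually been built, so a failed precheck no longer reports the caller as an existing user. A rough sketch of the deferred ordering, with stand-in types and helpers rather than the SigNoz DAO:

package main

import (
	"errors"
	"fmt"
)

type precheckResponse struct {
	IsUser bool
	SSO    bool
	SsoURL string
}

// buildSsoURL stands in for orgDomain.BuildSsoUrl(siteUrl).
func buildSsoURL(siteURL string) (string, error) {
	if siteURL == "" {
		return "", errors.New("no site url")
	}
	return siteURL + "/sso/login", nil
}

func precheckLogin(ssoAvailable bool, siteURL string) precheckResponse {
	var resp precheckResponse
	if ssoAvailable {
		url, err := buildSsoURL(siteURL)
		if err != nil {
			// leave IsUser false so the frontend does not treat the caller as an existing user
			return resp
		}
		resp.SsoURL = url
		resp.IsUser = true // set only after the SSO URL was generated correctly
		resp.SSO = true
	}
	return resp
}

func main() {
	fmt.Printf("%+v\n", precheckLogin(true, "https://example.signoz.io"))
	fmt.Printf("%+v\n", precheckLogin(true, "")) // failure path: IsUser stays false
}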

View File

@@ -3,15 +3,14 @@ package sqlite
import (
"context"
"fmt"
"strconv"
"go.signoz.io/signoz/ee/query-service/model"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
func (m *modelDao) CreatePAT(ctx context.Context, p model.PAT) (model.PAT, basemodel.BaseApiError) {
result, err := m.DB().ExecContext(ctx,
func (m *modelDao) CreatePAT(ctx context.Context, p *model.PAT) basemodel.BaseApiError {
_, err := m.DB().ExecContext(ctx,
"INSERT INTO personal_access_tokens (user_id, token, name, created_at, expires_at) VALUES ($1, $2, $3, $4, $5)",
p.UserID,
p.Token,
@@ -20,15 +19,9 @@ func (m *modelDao) CreatePAT(ctx context.Context, p model.PAT) (model.PAT, basem
p.ExpiresAt)
if err != nil {
zap.S().Errorf("Failed to insert PAT in db, err: %v", zap.Error(err))
return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
return model.InternalError(fmt.Errorf("PAT insertion failed"))
}
id, err := result.LastInsertId()
if err != nil {
zap.S().Errorf("Failed to get last inserted id, err: %v", zap.Error(err))
return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
}
p.Id = strconv.Itoa(int(id))
return p, nil
return nil
}
func (m *modelDao) ListPATs(ctx context.Context, userID string) ([]model.PAT, basemodel.BaseApiError) {
@@ -97,7 +90,7 @@ func (m *modelDao) GetUserByPAT(ctx context.Context, token string) (*basemodel.U
u.org_id,
u.group_id
FROM users u, personal_access_tokens p
WHERE u.id = p.user_id and p.token=? and p.expires_at >= strftime('%s', 'now');`
WHERE u.id = p.user_id and p.token=?;`
if err := m.DB().Select(&users, query, token); err != nil {
return nil, model.InternalError(fmt.Errorf("failed to fetch user from PAT, err: %v", err))
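
The last part of this hunk toggles whether GetUserByPAT also filters on p.expires_at >= strftime('%s', 'now'); since strftime('%s', 'now') is the current Unix timestamp, the extra predicate simply makes expired tokens stop matching. A self-contained sketch against an in-memory SQLite database — it assumes the github.com/mattn/go-sqlite3 driver and mirrors the column list from the INSERT above:

package main

import (
	"database/sql"
	"fmt"
	"log"
	"time"

	_ "github.com/mattn/go-sqlite3" // assumed driver; any database/sql SQLite driver works
)

func main() {
	db, err := sql.Open("sqlite3", ":memory:")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if _, err := db.Exec(`CREATE TABLE personal_access_tokens (
		user_id TEXT, token TEXT, name TEXT, created_at INTEGER, expires_at INTEGER)`); err != nil {
		log.Fatal(err)
	}

	now := time.Now().Unix()
	// one live token, one already expired
	db.Exec(`INSERT INTO personal_access_tokens VALUES ('u1', 'live-token', 'ci', ?, ?)`, now, now+3600)
	db.Exec(`INSERT INTO personal_access_tokens VALUES ('u1', 'dead-token', 'old', ?, ?)`, now-7200, now-3600)

	// the extra predicate from the hunk: strftime('%s','now') is the current Unix time,
	// so rows whose expires_at lies in the past never match
	rows, err := db.Query(`SELECT token FROM personal_access_tokens
		WHERE token = ? AND expires_at >= strftime('%s', 'now')`, "dead-token")
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()
	fmt.Println("expired token matched:", rows.Next()) // false
}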

View File

@@ -6,5 +6,5 @@ type PAT struct {
Token string `json:"token" db:"token"`
Name string `json:"name" db:"name"`
CreatedAt int64 `json:"createdAt" db:"created_at"`
ExpiresAt int64 `json:"expiresAt" db:"expires_at"`
ExpiresAt int64 `json:"expiresAt" db:"expires_at"` // unused as of now
}

View File

@@ -52,14 +52,14 @@ var BasicPlan = basemodel.FeatureSet{
Name: basemodel.QueryBuilderPanels,
Active: true,
Usage: 0,
UsageLimit: 20,
UsageLimit: 5,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderAlerts,
Active: true,
Usage: 0,
UsageLimit: 10,
UsageLimit: 5,
Route: "",
},
basemodel.Feature{

View File

@@ -86,7 +86,6 @@ module.exports = {
},
],
'import/no-extraneous-dependencies': ['error', { devDependencies: true }],
'no-plusplus': 'off',
'jsx-a11y/label-has-associated-control': [
'error',
{
@@ -110,6 +109,7 @@ module.exports = {
// eslint rules need to remove
'@typescript-eslint/no-shadow': 'off',
'import/no-cycle': 'off',
'prettier/prettier': [
'error',
{},

View File

@@ -29,9 +29,6 @@
"dependencies": {
"@ant-design/colors": "6.0.0",
"@ant-design/icons": "4.8.0",
"@dnd-kit/core": "6.1.0",
"@dnd-kit/modifiers": "7.0.0",
"@dnd-kit/sortable": "8.0.0",
"@grafana/data": "^9.5.2",
"@mdx-js/loader": "2.3.0",
"@mdx-js/react": "2.3.0",

View File

@@ -21,9 +21,5 @@
"error_while_updating_variable": "Error while updating variable",
"dashboard_has_been_updated": "Dashboard has been updated",
"do_you_want_to_refresh_the_dashboard": "Do you want to refresh the dashboard?",
"delete_dashboard_success": "{{name}} dashboard deleted successfully",
"dashboard_unsave_changes": "There are unsaved changes in the Query builder, please stage and run the query or the changes will be lost. Press OK to discard.",
"dashboard_save_changes": "Your graph built with {{queryTag}} query will be saved. Press OK to confirm.",
"your_graph_build_with": "Your graph built with",
"dashboar_ok_confirm": "query will be saved. Press OK to confirm."
"delete_dashboard_success": "{{name}} dashboard deleted successfully"
}

View File

@@ -24,9 +24,5 @@
"do_you_want_to_refresh_the_dashboard": "Do you want to refresh the dashboard?",
"locked_dashboard_delete_tooltip_admin_author": "Dashboard is locked. Please unlock the dashboard to enable delete.",
"locked_dashboard_delete_tooltip_editor": "Dashboard is locked. Please contact admin to delete the dashboard.",
"delete_dashboard_success": "{{name}} dashboard deleted successfully",
"dashboard_unsave_changes": "There are unsaved changes in the Query builder, please stage and run the query or the changes will be lost. Press OK to discard.",
"dashboard_save_changes": "Your graph built with {{queryTag}} query will be saved. Press OK to confirm.",
"your_graph_build_with": "Your graph built with",
"dashboar_ok_confirm": "query will be saved. Press OK to confirm."
"delete_dashboard_success": "{{name}} dashboard deleted successfully"
}

View File

@@ -1,9 +1,10 @@
import cacheBursting from 'i18n-translations-hash.json';
import i18n from 'i18next';
import LanguageDetector from 'i18next-browser-languagedetector';
import Backend from 'i18next-http-backend';
import { initReactI18next } from 'react-i18next';
import cacheBursting from '../../i18n-translations-hash.json';
i18n
// load translation using http -> see /public/locales
.use(Backend)

View File

@@ -5,7 +5,6 @@ import { QueryParams } from 'constants/query';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
import isEqual from 'lodash-es/isEqual';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import {
DeleteViewHandlerProps,
@@ -36,45 +35,6 @@ export const getViewDetailsUsingViewKey: GetViewDetailsUsingViewKey = (
return undefined;
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const omitIdFromQuery = (query: Query | null): any => ({
...query,
builder: {
...query?.builder,
queryData: query?.builder.queryData.map((queryData) => {
const { id, ...rest } = queryData.aggregateAttribute;
const newAggregateAttribute = rest;
const newGroupByAttributes = queryData.groupBy.map((groupByAttribute) => {
const { id, ...rest } = groupByAttribute;
return rest;
});
const newItems = queryData.filters.items.map((item) => {
const { id, ...newItem } = item;
if (item.key) {
const { id, ...rest } = item.key;
return {
...newItem,
key: rest,
};
}
return newItem;
});
return {
...queryData,
aggregateAttribute: newAggregateAttribute,
groupBy: newGroupByAttributes,
filters: {
...queryData.filters,
items: newItems,
},
limit: queryData.limit ? queryData.limit : 0,
offset: queryData.offset ? queryData.offset : 0,
pageSize: queryData.pageSize ? queryData.pageSize : 0,
};
}),
},
});
export const isQueryUpdatedInView = ({
viewKey,
data,
@@ -88,7 +48,43 @@ export const isQueryUpdatedInView = ({
const { query, panelType } = currentViewDetails;
// Omitting id from aggregateAttribute and groupBy
const updatedCurrentQuery = omitIdFromQuery(stagedQuery);
const updatedCurrentQuery = {
...stagedQuery,
builder: {
...stagedQuery?.builder,
queryData: stagedQuery?.builder.queryData.map((queryData) => {
const { id, ...rest } = queryData.aggregateAttribute;
const newAggregateAttribute = rest;
const newGroupByAttributes = queryData.groupBy.map((groupByAttribute) => {
const { id, ...rest } = groupByAttribute;
return rest;
});
const newItems = queryData.filters.items.map((item) => {
const { id, ...newItem } = item;
if (item.key) {
const { id, ...rest } = item.key;
return {
...newItem,
key: rest,
};
}
return newItem;
});
return {
...queryData,
aggregateAttribute: newAggregateAttribute,
groupBy: newGroupByAttributes,
filters: {
...queryData.filters,
items: newItems,
},
limit: queryData.limit ? queryData.limit : 0,
offset: queryData.offset ? queryData.offset : 0,
pageSize: queryData.pageSize ? queryData.pageSize : 0,
};
}),
},
};
return (
panelType !== currentPanelType ||

View File

@@ -1,9 +1,8 @@
/* eslint-disable sonarjs/cognitive-complexity */
import './Uplot.styles.scss';
import './uplot.scss';
import { Typography } from 'antd';
import { ToggleGraphProps } from 'components/Graph/types';
import { LineChart } from 'lucide-react';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import {
forwardRef,
@@ -128,16 +127,6 @@ const Uplot = forwardRef<ToggleGraphProps | undefined, UplotProps>(
}
}, [data, resetScales, create]);
if (data && data[0] && data[0]?.length === 0) {
return (
<div className="uplot-no-data not-found">
<LineChart size={48} strokeWidth={0.5} />
<Typography>No Data</Typography>
</div>
);
}
return (
<ErrorBoundary FallbackComponent={ErrorBoundaryFallback}>
<div className="uplot-graph-container" ref={targetRef}>

View File

@@ -13,11 +13,3 @@
height: 100%;
width: 100%;
}
.uplot-no-data {
position: relative;
display: flex;
width: 100%;
flex-direction: column;
gap: 8px;
}

View File

@@ -10,7 +10,7 @@ import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useEffect, useMemo, useRef, useState } from 'react';
import { useMemo, useRef } from 'react';
import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@@ -18,7 +18,6 @@ import { AlertDef } from 'types/api/alerts/def';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
import { GlobalReducer } from 'types/reducer/globalTime';
import { getTimeRange } from 'utils/getTimeRange';
import { ChartContainer, FailedMessageContainer } from './styles';
import { getThresholdLabel } from './utils';
@@ -50,13 +49,9 @@ function ChartPreview({
}: ChartPreviewProps): JSX.Element | null {
const { t } = useTranslation('alerts');
const threshold = alertDef?.condition.target || 0;
const [minTimeScale, setMinTimeScale] = useState<number>();
const [maxTimeScale, setMaxTimeScale] = useState<number>();
const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector<
AppState,
GlobalReducer
>((state) => state.globalTime);
const { minTime, maxTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
const canQuery = useMemo((): boolean => {
if (!query || query == null) {
@@ -106,13 +101,6 @@ function ChartPreview({
const graphRef = useRef<HTMLDivElement>(null);
useEffect((): void => {
const { startTime, endTime } = getTimeRange(queryResponse);
setMinTimeScale(startTime);
setMaxTimeScale(endTime);
}, [maxTime, minTime, globalSelectedInterval, queryResponse]);
const chartData = getUPlotChartData(queryResponse?.data?.payload);
const containerDimensions = useResizeObserver(graphRef);
@@ -129,8 +117,6 @@ function ChartPreview({
yAxisUnit,
apiResponse: queryResponse?.data?.payload,
dimensions: containerDimensions,
minTimeScale,
maxTimeScale,
isDarkMode,
thresholds: [
{
@@ -155,8 +141,6 @@ function ChartPreview({
yAxisUnit,
queryResponse?.data?.payload,
containerDimensions,
minTimeScale,
maxTimeScale,
isDarkMode,
threshold,
t,

View File

@@ -1,4 +1,3 @@
import { Tooltip } from 'antd';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { LabelContainer } from '../styles';
@@ -24,9 +23,7 @@ function Label({
disabled={disabled}
onClick={onClickHandler}
>
<Tooltip title={label} placement="topLeft">
{getAbbreviatedLabel(label)}
</Tooltip>
{getAbbreviatedLabel(label)}
</LabelContainer>
);
}

View File

@@ -23,7 +23,6 @@ import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { GlobalReducer } from 'types/reducer/globalTime';
import uPlot from 'uplot';
import { getTimeRange } from 'utils/getTimeRange';
import { PANEL_TYPES_VS_FULL_VIEW_TABLE } from './contants';
import GraphManager from './GraphManager';
@@ -93,21 +92,6 @@ function FullView({
const isDarkMode = useIsDarkMode();
const [minTimeScale, setMinTimeScale] = useState<number>();
const [maxTimeScale, setMaxTimeScale] = useState<number>();
const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector<
AppState,
GlobalReducer
>((state) => state.globalTime);
useEffect((): void => {
const { startTime, endTime } = getTimeRange(response);
setMinTimeScale(startTime);
setMaxTimeScale(endTime);
}, [maxTime, minTime, globalSelectedInterval, response]);
useEffect(() => {
if (!response.isFetching && fullViewRef.current) {
const width = fullViewRef.current?.clientWidth
@@ -130,8 +114,6 @@ function FullView({
graphsVisibilityStates,
setGraphsVisibilityStates,
thresholds: widget.thresholds,
minTimeScale,
maxTimeScale,
});
setChartOptions(newChartOptions);

View File

@@ -1,5 +1,4 @@
import { Skeleton, Typography } from 'antd';
import cx from 'classnames';
import { ToggleGraphProps } from 'components/Graph/types';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { QueryParams } from 'constants/query';
@@ -299,10 +298,7 @@ function WidgetGraphComponent({
</div>
{queryResponse.isLoading && <Skeleton />}
{queryResponse.isSuccess && (
<div
className={cx('widget-graph-container', widget.panelTypes)}
ref={graphRef}
>
<div style={{ height: '90%' }} ref={graphRef}>
<GridPanelSwitch
panelType={widget.panelTypes}
data={data}

View File

@@ -15,7 +15,6 @@ import { useDispatch, useSelector } from 'react-redux';
import { UpdateTimeInterval } from 'store/actions';
import { AppState } from 'store/reducers';
import { GlobalReducer } from 'types/reducer/globalTime';
import { getTimeRange } from 'utils/getTimeRange';
import EmptyWidget from '../EmptyWidget';
import { MenuItemKeys } from '../WidgetHeader/contants';
@@ -35,8 +34,6 @@ function GridCardGraph({
const dispatch = useDispatch();
const [errorMessage, setErrorMessage] = useState<string>();
const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
const [minTimeScale, setMinTimeScale] = useState<number>();
const [maxTimeScale, setMaxTimeScale] = useState<number>();
const onDragSelect = useCallback(
(start: number, end: number): void => {
@@ -65,16 +62,16 @@ function GridCardGraph({
}
}, [toScrollWidgetId, setToScrollWidgetId, widget.id]);
const updatedQuery = useStepInterval(widget?.query);
const isEmptyWidget =
widget?.id === PANEL_TYPES.EMPTY_WIDGET || isEmpty(widget);
const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector<
AppState,
GlobalReducer
>((state) => state.globalTime);
const updatedQuery = useStepInterval(widget?.query);
const isEmptyWidget =
widget?.id === PANEL_TYPES.EMPTY_WIDGET || isEmpty(widget);
const queryResponse = useGetQueryRange(
{
selectedTime: widget?.timePreferance,
@@ -106,13 +103,6 @@ function GridCardGraph({
const containerDimensions = useResizeObserver(graphRef);
useEffect((): void => {
const { startTime, endTime } = getTimeRange(queryResponse);
setMinTimeScale(startTime);
setMaxTimeScale(endTime);
}, [maxTime, minTime, globalSelectedInterval, queryResponse]);
const chartData = getUPlotChartData(queryResponse?.data?.payload, fillSpans);
const isDarkMode = useIsDarkMode();
@@ -133,8 +123,6 @@ function GridCardGraph({
yAxisUnit: widget?.yAxisUnit,
onClickHandler,
thresholds: widget.thresholds,
minTimeScale,
maxTimeScale,
}),
[
widget?.id,
@@ -145,8 +133,6 @@ function GridCardGraph({
isDarkMode,
onDragSelect,
onClickHandler,
minTimeScale,
maxTimeScale,
],
);

View File

@@ -5,11 +5,3 @@
border: none !important;
}
}
.widget-graph-container {
height: 100%;
&.graph {
height: calc(100% - 30px);
}
}

View File

@@ -2,12 +2,9 @@
display: flex;
justify-content: space-between;
align-items: center;
height: 40px;
height: 30px;
width: 100%;
padding: 0.5rem;
box-sizing: border-box;
font-size: 14px;
font-weight: 600;
}
.widget-header-title {
@@ -22,10 +19,6 @@
visibility: hidden;
border: none;
box-shadow: none;
cursor: pointer;
font: 14px;
font-weight: 600;
padding: 8px;
}
.widget-header-hover {

View File

@@ -10,7 +10,7 @@ import {
MoreOutlined,
WarningOutlined,
} from '@ant-design/icons';
import { Dropdown, MenuProps, Tooltip, Typography } from 'antd';
import { Button, Dropdown, MenuProps, Tooltip, Typography } from 'antd';
import Spinner from 'components/Spinner';
import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
@@ -199,7 +199,9 @@ function WidgetHeader({
</Tooltip>
)}
<Dropdown menu={menu} trigger={['hover']} placement="bottomRight">
<MoreOutlined
<Button
type="default"
icon={<MoreOutlined />}
className={`widget-header-more-options ${
parentHover ? 'widget-header-hover' : ''
}`}

View File

@@ -2,7 +2,7 @@ import { Button as ButtonComponent, Card as CardComponent, Space } from 'antd';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { StyledCSS } from 'container/GantChart/Trace/styles';
import RGL, { WidthProvider } from 'react-grid-layout';
import styled, { css } from 'styled-components';
import styled, { css, FlattenSimpleInterpolation } from 'styled-components';
const ReactGridLayoutComponent = WidthProvider(RGL);
@@ -17,8 +17,14 @@ export const Card = styled(CardComponent)<CardProps>`
}
.ant-card-body {
height: calc(100% - 40px);
height: 90%;
padding: 0;
${({ $panelType }): FlattenSimpleInterpolation =>
$panelType === PANEL_TYPES.TABLE
? css`
padding-top: 1.8rem;
`
: css``}
}
`;

View File

@@ -29,7 +29,7 @@ function SettingsDrawer({ drawerTitle }: { drawerTitle: string }): JSX.Element {
<DrawerContainer
title={drawerTitle}
placement="right"
width="60%"
width="50%"
onClose={onClose}
open={visible}
>

View File

@@ -1,5 +0,0 @@
.delete-variable-name {
font-weight: 700;
color: rgb(207, 19, 34);
font-style: italic;
}

View File

@@ -18,10 +18,10 @@ import {
VariableQueryTypeArr,
VariableSortTypeArr,
} from 'types/api/dashboard/getAll';
import { v4 as generateUUID } from 'uuid';
import { v4 } from 'uuid';
import { variablePropsToPayloadVariables } from '../../../utils';
import { TVariableMode } from '../types';
import { TVariableViewMode } from '../types';
import { LabelContainer, VariableItemRow } from './styles';
const { Option } = Select;
@@ -30,9 +30,9 @@ interface VariableItemProps {
variableData: IDashboardVariable;
existingVariables: Record<string, IDashboardVariable>;
onCancel: () => void;
onSave: (mode: TVariableMode, variableData: IDashboardVariable) => void;
onSave: (name: string, arg0: IDashboardVariable, arg1: string) => void;
validateName: (arg0: string) => boolean;
mode: TVariableMode;
variableViewMode: TVariableViewMode;
}
function VariableItem({
variableData,
@@ -40,7 +40,7 @@ function VariableItem({
onCancel,
onSave,
validateName,
mode,
variableViewMode,
}: VariableItemProps): JSX.Element {
const [variableName, setVariableName] = useState<string>(
variableData.name || '',
@@ -97,7 +97,7 @@ function VariableItem({
]);
const handleSave = (): void => {
const variable: IDashboardVariable = {
const newVariableData: IDashboardVariable = {
name: variableName,
description: variableDescription,
type: queryType,
@@ -111,12 +111,16 @@ function VariableItem({
selectedValue: (variableData.selectedValue ||
variableTextboxValue) as never,
}),
modificationUUID: generateUUID(),
id: variableData.id || generateUUID(),
order: variableData.order,
modificationUUID: v4(),
};
onSave(mode, variable);
onSave(
variableName,
newVariableData,
(variableViewMode === 'EDIT' && variableName !== variableData.name
? variableData.name
: '') as string,
);
onCancel();
};
// Fetches the preview values for the SQL variable query
@@ -171,6 +175,7 @@ function VariableItem({
return (
<div className="variable-item-container">
<div className="variable-item-content">
{/* <Typography.Title level={3}>Add Variable</Typography.Title> */}
<VariableItemRow>
<LabelContainer>
<Typography>Name</Typography>

View File

@@ -1,78 +1,20 @@
import '../DashboardSettings.styles.scss';
import { blue, red } from '@ant-design/colors';
import { MenuOutlined, PlusOutlined } from '@ant-design/icons';
import type { DragEndEvent, UniqueIdentifier } from '@dnd-kit/core';
import {
DndContext,
PointerSensor,
useSensor,
useSensors,
} from '@dnd-kit/core';
import { restrictToVerticalAxis } from '@dnd-kit/modifiers';
import { arrayMove, SortableContext, useSortable } from '@dnd-kit/sortable';
// eslint-disable-next-line import/no-extraneous-dependencies
import { CSS } from '@dnd-kit/utilities';
import { Button, Modal, Row, Space, Table, Typography } from 'antd';
import { RowProps } from 'antd/lib';
import { convertVariablesToDbFormat } from 'container/NewDashboard/DashboardVariablesSelection/util';
import { PlusOutlined } from '@ant-design/icons';
import { Button, Modal, Row, Space, Tag } from 'antd';
import { ResizeTable } from 'components/ResizeTable';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import { useNotifications } from 'hooks/useNotifications';
import { PencilIcon, TrashIcon } from 'lucide-react';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import React, { useEffect, useRef, useState } from 'react';
import { useRef, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { Dashboard, IDashboardVariable } from 'types/api/dashboard/getAll';
import { TVariableMode } from './types';
import { TVariableViewMode } from './types';
import VariableItem from './VariableItem/VariableItem';
function TableRow({ children, ...props }: RowProps): JSX.Element {
const {
attributes,
listeners,
setNodeRef,
setActivatorNodeRef,
transform,
transition,
isDragging,
} = useSortable({
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
id: props['data-row-key'],
});
const style: React.CSSProperties = {
...props.style,
transform: CSS.Transform.toString(transform && { ...transform, scaleY: 1 }),
transition,
...(isDragging ? { position: 'relative', zIndex: 9999 } : {}),
};
return (
// eslint-disable-next-line react/jsx-props-no-spreading
<tr {...props} ref={setNodeRef} style={style} {...attributes}>
{React.Children.map(children, (child) => {
if ((child as React.ReactElement).key === 'sort') {
return React.cloneElement(child as React.ReactElement, {
children: (
<MenuOutlined
ref={setActivatorNodeRef}
style={{ touchAction: 'none', cursor: 'move' }}
// eslint-disable-next-line react/jsx-props-no-spreading
{...listeners}
/>
),
});
}
return child;
})}
</tr>
);
}
function VariablesSetting(): JSX.Element {
const variableToDelete = useRef<IDashboardVariable | null>(null);
const variableToDelete = useRef<string | null>(null);
const [deleteVariableModal, setDeleteVariableModal] = useState(false);
const { t } = useTranslation(['dashboard']);
@@ -83,15 +25,16 @@ function VariablesSetting(): JSX.Element {
const { variables = {} } = selectedDashboard?.data || {};
const [variablesTableData, setVariablesTableData] = useState<any>([]);
const [variblesOrderArr, setVariablesOrderArr] = useState<number[]>([]);
const [existingVariableNamesMap, setExistingVariableNamesMap] = useState<
Record<string, string>
>({});
const variablesTableData = Object.keys(variables).map((variableName) => ({
key: variableName,
name: variableName,
...variables[variableName],
}));
const [variableViewMode, setVariableViewMode] = useState<null | TVariableMode>(
null,
);
const [
variableViewMode,
setVariableViewMode,
] = useState<null | TVariableViewMode>(null);
const [
variableEditData,
@@ -104,7 +47,7 @@ function VariablesSetting(): JSX.Element {
};
const onVariableViewModeEnter = (
viewType: TVariableMode,
viewType: TVariableViewMode,
varData: IDashboardVariable,
): void => {
setVariableEditData(varData);
@@ -113,41 +56,6 @@ function VariablesSetting(): JSX.Element {
const updateMutation = useUpdateDashboard();
useEffect(() => {
const tableRowData = [];
const variableOrderArr = [];
const variableNamesMap = {};
// eslint-disable-next-line no-restricted-syntax
for (const [key, value] of Object.entries(variables)) {
const { order, id, name } = value;
tableRowData.push({
key,
name: key,
...variables[key],
id,
});
if (name) {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
variableNamesMap[name] = name;
}
if (order) {
variableOrderArr.push(order);
}
}
tableRowData.sort((a, b) => a.order - b.order);
variableOrderArr.sort((a, b) => a - b);
setVariablesTableData(tableRowData);
setVariablesOrderArr(variableOrderArr);
setExistingVariableNamesMap(variableNamesMap);
}, [variables]);
const updateVariables = (
updatedVariablesData: Dashboard['data']['variables'],
): void => {
@@ -181,58 +89,34 @@ function VariablesSetting(): JSX.Element {
);
};
const getVariableOrder = (): number => {
if (variblesOrderArr && variblesOrderArr.length > 0) {
return variblesOrderArr[variblesOrderArr.length - 1] + 1;
}
return 0;
};
const onVariableSaveHandler = (
mode: TVariableMode,
name: string,
variableData: IDashboardVariable,
oldName: string,
): void => {
const updatedVariableData = {
...variableData,
order: variableData?.order >= 0 ? variableData.order : getVariableOrder(),
};
const newVariablesArr = variablesTableData.map(
(variable: IDashboardVariable) => {
if (variable.id === updatedVariableData.id) {
return updatedVariableData;
}
return variable;
},
);
if (mode === 'ADD') {
newVariablesArr.push(updatedVariableData);
if (!variableData.name) {
return;
}
const variables = convertVariablesToDbFormat(newVariablesArr);
const newVariables = { ...variables };
newVariables[name] = variableData;
setVariablesTableData(newVariablesArr);
updateVariables(variables);
if (oldName) {
delete newVariables[oldName];
}
updateVariables(newVariables);
onDoneVariableViewMode();
};
const onVariableDeleteHandler = (variable: IDashboardVariable): void => {
variableToDelete.current = variable;
const onVariableDeleteHandler = (variableName: string): void => {
variableToDelete.current = variableName;
setDeleteVariableModal(true);
};
const handleDeleteConfirm = (): void => {
const newVariablesArr = variablesTableData.filter(
(variable: IDashboardVariable) =>
variable.id !== variableToDelete?.current?.id,
);
const updatedVariables = convertVariablesToDbFormat(newVariablesArr);
updateVariables(updatedVariables);
const newVariables = { ...variables };
if (variableToDelete?.current) delete newVariables[variableToDelete?.current];
updateVariables(newVariables);
variableToDelete.current = null;
setDeleteVariableModal(false);
};
@@ -241,36 +125,31 @@ function VariablesSetting(): JSX.Element {
setDeleteVariableModal(false);
};
const validateVariableName = (name: string): boolean =>
!existingVariableNamesMap[name];
const validateVariableName = (name: string): boolean => !variables[name];
const columns = [
{
key: 'sort',
width: '10%',
},
{
title: 'Variable',
dataIndex: 'name',
width: '40%',
width: 100,
key: 'name',
},
{
title: 'Description',
dataIndex: 'description',
width: '35%',
width: 100,
key: 'description',
},
{
title: 'Actions',
width: '15%',
width: 50,
key: 'action',
render: (variable: IDashboardVariable): JSX.Element => (
render: (_: IDashboardVariable): JSX.Element => (
<Space>
<Button
type="text"
style={{ padding: 8, cursor: 'pointer', color: blue[5] }}
onClick={(): void => onVariableViewModeEnter('EDIT', variable)}
onClick={(): void => onVariableViewModeEnter('EDIT', _)}
>
<PencilIcon size={14} />
</Button>
@@ -278,9 +157,7 @@ function VariablesSetting(): JSX.Element {
type="text"
style={{ padding: 8, color: red[6], cursor: 'pointer' }}
onClick={(): void => {
if (variable) {
onVariableDeleteHandler(variable);
}
if (_.name) onVariableDeleteHandler(_.name);
}}
>
<TrashIcon size={14} />
@@ -290,51 +167,6 @@ function VariablesSetting(): JSX.Element {
},
];
const sensors = useSensors(
useSensor(PointerSensor, {
activationConstraint: {
// https://docs.dndkit.com/api-documentation/sensors/pointer#activation-constraints
distance: 1,
},
}),
);
const onDragEnd = ({ active, over }: DragEndEvent): void => {
if (active.id !== over?.id) {
const activeIndex = variablesTableData.findIndex(
(i: { key: UniqueIdentifier }) => i.key === active.id,
);
const overIndex = variablesTableData.findIndex(
(i: { key: UniqueIdentifier | undefined }) => i.key === over?.id,
);
const updatedVariables: IDashboardVariable[] = arrayMove(
variablesTableData,
activeIndex,
overIndex,
);
const reArrangedVariables = {};
for (let index = 0; index < updatedVariables.length; index += 1) {
const variableName = updatedVariables[index].name;
if (variableName) {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
reArrangedVariables[variableName] = {
...updatedVariables[index],
order: index,
};
}
}
updateVariables(reArrangedVariables);
setVariablesTableData(updatedVariables);
}
};
return (
<>
{variableViewMode ? (
@@ -344,17 +176,11 @@ function VariablesSetting(): JSX.Element {
onSave={onVariableSaveHandler}
onCancel={onDoneVariableViewMode}
validateName={validateVariableName}
mode={variableViewMode}
variableViewMode={variableViewMode}
/>
) : (
<>
<Row
style={{
flexDirection: 'row',
justifyContent: 'flex-end',
padding: '0.5rem 0',
}}
>
<Row style={{ flexDirection: 'row-reverse', padding: '0.5rem 0' }}>
<Button
data-testid="add-new-variable"
type="primary"
@@ -366,28 +192,7 @@ function VariablesSetting(): JSX.Element {
</Button>
</Row>
<DndContext
sensors={sensors}
modifiers={[restrictToVerticalAxis]}
onDragEnd={onDragEnd}
>
<SortableContext
// rowKey array
items={variablesTableData.map((variable: { key: any }) => variable.key)}
>
<Table
components={{
body: {
row: TableRow,
},
}}
rowKey="key"
columns={columns}
pagination={false}
dataSource={variablesTableData}
/>
</SortableContext>
</DndContext>
<ResizeTable columns={columns} dataSource={variablesTableData} />
</>
)}
<Modal
@@ -397,13 +202,8 @@ function VariablesSetting(): JSX.Element {
onOk={handleDeleteConfirm}
onCancel={handleDeleteCancel}
>
<Typography.Text>
Are you sure you want to delete variable{' '}
<span className="delete-variable-name">
{variableToDelete?.current?.name}
</span>
?
</Typography.Text>
Are you sure you want to delete variable{' '}
<Tag>{variableToDelete.current}</Tag>?
</Modal>
</>
);

View File

@@ -1,7 +1 @@
export type TVariableMode = 'VIEW' | 'EDIT' | 'ADD';
export const VariableModes = {
VIEW: 'VIEW',
EDIT: 'EDIT',
ADD: 'ADD',
};
export type TVariableViewMode = 'EDIT' | 'ADD';

View File

@@ -1,14 +1,14 @@
import { Row } from 'antd';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import { useNotifications } from 'hooks/useNotifications';
import { map, sortBy } from 'lodash-es';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { memo, useEffect, useState } from 'react';
import { memo, useState } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { Dashboard, IDashboardVariable } from 'types/api/dashboard/getAll';
import AppReducer from 'types/reducer/app';
import { convertVariablesToDbFormat } from './util';
import VariableItem from './VariableItem';
function DashboardVariableSelection(): JSX.Element | null {
@@ -21,32 +21,8 @@ function DashboardVariableSelection(): JSX.Element | null {
const [update, setUpdate] = useState<boolean>(false);
const [lastUpdatedVar, setLastUpdatedVar] = useState<string>('');
const [variablesTableData, setVariablesTableData] = useState<any>([]);
const { role } = useSelector<AppState, AppReducer>((state) => state.app);
useEffect(() => {
if (variables) {
const tableRowData = [];
// eslint-disable-next-line no-restricted-syntax
for (const [key, value] of Object.entries(variables)) {
const { id } = value;
tableRowData.push({
key,
name: key,
...variables[key],
id,
});
}
tableRowData.sort((a, b) => a.order - b.order);
setVariablesTableData(tableRowData);
}
}, [variables]);
const onVarChanged = (name: string): void => {
setLastUpdatedVar(name);
setUpdate(!update);
@@ -88,56 +64,40 @@ function DashboardVariableSelection(): JSX.Element | null {
const onValueUpdate = (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
): void => {
if (id) {
const newVariablesArr = variablesTableData.map(
(variable: IDashboardVariable) => {
const variableCopy = { ...variable };
const updatedVariablesData = { ...variables };
updatedVariablesData[name].selectedValue = value;
updatedVariablesData[name].allSelected = allSelected;
if (variableCopy.id === id) {
variableCopy.selectedValue = value;
variableCopy.allSelected = allSelected;
}
console.log('onValue Update', name);
return variableCopy;
},
);
const variables = convertVariablesToDbFormat(newVariablesArr);
if (role !== 'VIEWER' && selectedDashboard) {
updateVariables(name, variables);
}
onVarChanged(name);
setUpdate(!update);
if (role !== 'VIEWER' && selectedDashboard) {
updateVariables(name, updatedVariablesData);
}
onVarChanged(name);
setUpdate(!update);
};
if (!variables) {
return null;
}
const orderBasedSortedVariables = variablesTableData.sort(
(a: { order: number }, b: { order: number }) => a.order - b.order,
);
const variablesKeys = sortBy(Object.keys(variables));
return (
<Row>
{orderBasedSortedVariables &&
Array.isArray(orderBasedSortedVariables) &&
orderBasedSortedVariables.length > 0 &&
orderBasedSortedVariables.map((variable) => (
{variablesKeys &&
map(variablesKeys, (variableName) => (
<VariableItem
key={`${variable.name}${variable.id}}${variable.order}`}
key={`${variableName}${variables[variableName].modificationUUID}`}
existingVariables={variables}
lastUpdatedVar={lastUpdatedVar}
variableData={{
name: variable.name,
...variable,
name: variableName,
...variables[variableName],
change: update,
}}
onValueUpdate={onValueUpdate}

View File

@@ -14,7 +14,6 @@ import { IDashboardVariable } from 'types/api/dashboard/getAll';
import VariableItem from './VariableItem';
const mockVariableData: IDashboardVariable = {
id: 'test_variable',
description: 'Test Variable',
type: 'TEXTBOX',
textboxValue: 'defaultValue',
@@ -96,7 +95,6 @@ describe('VariableItem', () => {
// expect(mockOnValueUpdate).toHaveBeenCalledTimes(1);
expect(mockOnValueUpdate).toHaveBeenCalledWith(
'testVariable',
'test_variable',
'newValue',
false,
);

View File

@@ -2,14 +2,13 @@ import './DashboardVariableSelection.styles.scss';
import { orange } from '@ant-design/colors';
import { WarningOutlined } from '@ant-design/icons';
import { Input, Popover, Select, Tooltip, Typography } from 'antd';
import { Input, Popover, Select, Typography } from 'antd';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import useDebounce from 'hooks/useDebounce';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import map from 'lodash-es/map';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { memo, useEffect, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
@@ -28,7 +27,6 @@ interface VariableItemProps {
existingVariables: Record<string, IDashboardVariable>;
onValueUpdate: (
name: string,
id: string,
arg1: IDashboardVariable['selectedValue'],
allSelected: boolean,
) => void;
@@ -50,7 +48,6 @@ function VariableItem({
onValueUpdate,
lastUpdatedVar,
}: VariableItemProps): JSX.Element {
const { isDashboardLocked } = useDashboard();
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
[],
);
@@ -140,9 +137,8 @@ function VariableItem({
} else {
[value] = newOptionsData;
}
if (variableData && variableData?.name && variableData?.id) {
onValueUpdate(variableData.name, variableData.id, value, allSelected);
if (variableData.name) {
onValueUpdate(variableData.name, value, allSelected);
}
}
@@ -153,13 +149,14 @@ function VariableItem({
console.error(e);
}
} else if (variableData.type === 'CUSTOM') {
const optionsData = sortValues(
commaValuesParser(variableData.customValue || ''),
variableData.sort,
) as never;
setOptionsData(optionsData);
setOptionsData(
sortValues(
commaValuesParser(variableData.customValue || ''),
variableData.sort,
) as never,
);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
};
const { isLoading } = useQuery(getQueryKey(variableData), {
@@ -198,9 +195,9 @@ function VariableItem({
(Array.isArray(value) && value.includes(ALL_SELECT_VALUE)) ||
(Array.isArray(value) && value.length === 0)
) {
onValueUpdate(variableData.name, variableData.id, optionsData, true);
onValueUpdate(variableData.name, optionsData, true);
} else {
onValueUpdate(variableData.name, variableData.id, value, false);
onValueUpdate(variableData.name, value, false);
}
};
@@ -233,79 +230,72 @@ function VariableItem({
getOptions(null);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [variableData.type, variableData.customValue]);
}, []);
return (
<Tooltip
placement="top"
title={isDashboardLocked ? 'Dashboard is locked' : ''}
>
<VariableContainer>
<Typography.Text className="variable-name" ellipsis>
${variableData.name}
</Typography.Text>
<VariableValue>
{variableData.type === 'TEXTBOX' ? (
<Input
placeholder="Enter value"
disabled={isDashboardLocked}
<VariableContainer>
<Typography.Text className="variable-name" ellipsis>
${variableData.name}
</Typography.Text>
<VariableValue>
{variableData.type === 'TEXTBOX' ? (
<Input
placeholder="Enter value"
bordered={false}
value={variableValue}
onChange={(e): void => {
setVaribleValue(e.target.value || '');
}}
style={{
width:
50 + ((variableData.selectedValue?.toString()?.length || 0) * 7 || 50),
}}
/>
) : (
!errorMessage &&
optionsData && (
<Select
value={selectValue}
onChange={handleChange}
bordered={false}
value={variableValue}
onChange={(e): void => {
setVaribleValue(e.target.value || '');
}}
style={{
width:
50 + ((variableData.selectedValue?.toString()?.length || 0) * 7 || 50),
}}
/>
) : (
!errorMessage &&
optionsData && (
<Select
value={selectValue}
onChange={handleChange}
bordered={false}
placeholder="Select value"
mode={mode}
dropdownMatchSelectWidth={false}
style={SelectItemStyle}
loading={isLoading}
showArrow
showSearch
data-testid="variable-select"
disabled={isDashboardLocked}
>
{enableSelectAll && (
<Select.Option data-testid="option-ALL" value={ALL_SELECT_VALUE}>
ALL
</Select.Option>
)}
{map(optionsData, (option) => (
<Select.Option
data-testid={`option-${option}`}
key={option.toString()}
value={option}
>
{option.toString()}
</Select.Option>
))}
</Select>
)
)}
{variableData.type !== 'TEXTBOX' && errorMessage && (
<span style={{ margin: '0 0.5rem' }}>
<Popover
placement="top"
content={<Typography>{errorMessage}</Typography>}
>
<WarningOutlined style={{ color: orange[5] }} />
</Popover>
</span>
)}
</VariableValue>
</VariableContainer>
</Tooltip>
placeholder="Select value"
mode={mode}
dropdownMatchSelectWidth={false}
style={SelectItemStyle}
loading={isLoading}
showArrow
showSearch
data-testid="variable-select"
>
{enableSelectAll && (
<Select.Option data-testid="option-ALL" value={ALL_SELECT_VALUE}>
ALL
</Select.Option>
)}
{map(optionsData, (option) => (
<Select.Option
data-testid={`option-${option}`}
key={option.toString()}
value={option}
>
{option.toString()}
</Select.Option>
))}
</Select>
)
)}
{errorMessage && (
<span style={{ margin: '0 0.5rem' }}>
<Popover
placement="top"
content={<Typography>{errorMessage}</Typography>}
>
<WarningOutlined style={{ color: orange[5] }} />
</Popover>
</span>
)}
</VariableValue>
</VariableContainer>
);
}

View File

@@ -1,5 +1,3 @@
import { Dashboard, IDashboardVariable } from 'types/api/dashboard/getAll';
export function areArraysEqual(
a: (string | number | boolean)[],
b: (string | number | boolean)[],
@@ -16,16 +14,3 @@ export function areArraysEqual(
return true;
}
export const convertVariablesToDbFormat = (
variblesArr: IDashboardVariable[],
): Dashboard['data']['variables'] =>
variblesArr.reduce((result, obj: IDashboardVariable) => {
const { id } = obj;
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
// eslint-disable-next-line no-param-reassign
result[id] = obj;
return result;
}, {});

View File

@@ -6,10 +6,8 @@ export function variablePropsToPayloadVariables(
): PayloadVariables {
const payloadVariables: PayloadVariables = {};
Object.entries(variables).forEach(([, value]) => {
if (value?.name) {
payloadVariables[value.name] = value?.selectedValue;
}
Object.entries(variables).forEach(([key, value]) => {
payloadVariables[key] = value?.selectedValue;
});
return payloadVariables;

View File

@@ -6,16 +6,13 @@ import { useResizeObserver } from 'hooks/useDimensions';
import useUrlQuery from 'hooks/useUrlQuery';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useCallback, useMemo, useRef } from 'react';
import { UseQueryResult } from 'react-query';
import { useDispatch, useSelector } from 'react-redux';
import { useDispatch } from 'react-redux';
import { UpdateTimeInterval } from 'store/actions';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { GlobalReducer } from 'types/reducer/globalTime';
import { getTimeRange } from 'utils/getTimeRange';
function WidgetGraph({
getWidgetQueryRange,
@@ -26,21 +23,6 @@ function WidgetGraph({
}: WidgetGraphProps): JSX.Element {
const { stagedQuery } = useQueryBuilder();
const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector<
AppState,
GlobalReducer
>((state) => state.globalTime);
const [minTimeScale, setMinTimeScale] = useState<number>();
const [maxTimeScale, setMaxTimeScale] = useState<number>();
useEffect((): void => {
const { startTime, endTime } = getTimeRange(getWidgetQueryRange);
setMinTimeScale(startTime);
setMaxTimeScale(endTime);
}, [getWidgetQueryRange, maxTime, minTime, globalSelectedInterval]);
const graphRef = useRef<HTMLDivElement>(null);
const containerDimensions = useResizeObserver(graphRef);
@@ -81,8 +63,6 @@ function WidgetGraph({
onDragSelect,
thresholds,
fillSpans,
minTimeScale,
maxTimeScale,
}),
[
widgetId,
@@ -93,8 +73,6 @@ function WidgetGraph({
onDragSelect,
thresholds,
fillSpans,
minTimeScale,
maxTimeScale,
],
);

View File

@@ -12,7 +12,8 @@ export const Container = styled(Card)<Props>`
}
.ant-card-body {
padding: 8px;
padding: ${({ $panelType }): string =>
$panelType === PANEL_TYPES.TABLE ? '0 0' : '1.5rem 0'};
height: 57vh;
overflow: auto;
display: flex;

View File

@@ -1,6 +1,5 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { LockFilled, WarningOutlined } from '@ant-design/icons';
import { Button, Modal, Space, Tooltip, Typography } from 'antd';
import { LockFilled } from '@ant-design/icons';
import { Button, Modal, Tooltip, Typography } from 'antd';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { FeatureKeys } from 'constants/features';
import { PANEL_TYPES } from 'constants/queryBuilder';
@@ -19,7 +18,6 @@ import {
getSelectedWidgetIndex,
} from 'providers/Dashboard/util';
import { useCallback, useMemo, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux';
import { generatePath, useLocation, useParams } from 'react-router-dom';
import { AppState } from 'store/reducers';
@@ -41,7 +39,6 @@ import {
RightContainerWrapper,
} from './styles';
import { NewWidgetProps } from './types';
import { getIsQueryModified } from './utils';
function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
const {
@@ -50,14 +47,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
setToScrollWidgetId,
} = useDashboard();
const { t } = useTranslation(['dashboard']);
const { currentQuery, stagedQuery } = useQueryBuilder();
const isQueryModified = useMemo(
() => getIsQueryModified(currentQuery, stagedQuery),
[currentQuery, stagedQuery],
);
const { currentQuery } = useQueryBuilder();
const { featureResponse } = useSelector<AppState, AppReducer>(
(state) => state.app,
@@ -102,12 +92,6 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
selectedWidget?.fillSpans || false,
);
const [saveModal, setSaveModal] = useState(false);
const [discardModal, setDiscardModal] = useState(false);
const closeModal = (): void => {
setSaveModal(false);
setDiscardModal(false);
};
const [graphType, setGraphType] = useState(selectedGraph);
@@ -222,14 +206,6 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
]);
const onClickDiscardHandler = useCallback(() => {
if (isQueryModified) {
setDiscardModal(true);
return;
}
history.push(generatePath(ROUTES.DASHBOARD, { dashboardId }));
}, [dashboardId, isQueryModified]);
const discardChanges = useCallback(() => {
history.push(generatePath(ROUTES.DASHBOARD, { dashboardId }));
}, [dashboardId]);
@@ -345,54 +321,21 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
</RightContainerWrapper>
</PanelContainer>
<Modal
title={
isQueryModified ? (
<Space>
<WarningOutlined style={{ fontSize: '16px', color: '#fdd600' }} />
Unsaved Changes
</Space>
) : (
'Save Widget'
)
}
title="Save Changes"
focusTriggerAfterClose
forceRender
destroyOnClose
closable
onCancel={closeModal}
onCancel={(): void => setSaveModal(false)}
onOk={onClickSaveHandler}
centered
open={saveModal}
width={600}
>
{!isQueryModified ? (
<Typography>
{t('your_graph_build_with')}{' '}
<QueryTypeTag queryType={currentQuery.queryType} />
{t('dashboar_ok_confirm')}
</Typography>
) : (
<Typography>{t('dashboard_unsave_changes')} </Typography>
)}
</Modal>
<Modal
title={
<Space>
<WarningOutlined style={{ fontSize: '16px', color: '#fdd600' }} />
Unsaved Changes
</Space>
}
focusTriggerAfterClose
forceRender
destroyOnClose
closable
onCancel={closeModal}
onOk={discardChanges}
centered
open={discardModal}
width={600}
>
<Typography>{t('dashboard_unsave_changes')}</Typography>
<Typography>
Your graph built with <QueryTypeTag queryType={currentQuery.queryType} />{' '}
query will be saved. Press OK to confirm.
</Typography>
</Modal>
</Container>
);

View File

@@ -1,15 +0,0 @@
import { omitIdFromQuery } from 'components/ExplorerCard/utils';
import { isEqual } from 'lodash-es';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
export const getIsQueryModified = (
currentQuery: Query,
stagedQuery: Query | null,
): boolean => {
if (!stagedQuery) {
return false;
}
const omitIdFromStageQuery = omitIdFromQuery(stagedQuery);
const omitIdFromCurrentQuery = omitIdFromQuery(currentQuery);
return !isEqual(omitIdFromStageQuery, omitIdFromCurrentQuery);
};

View File

@@ -71,8 +71,8 @@ export default function ModuleStepsContainer({
} = useOnboardingContext();
const [current, setCurrent] = useState(0);
const { trackEvent } = useAnalytics();
const [metaData, setMetaData] = useState<MetaDataProps[]>(defaultMetaData);
const { trackEvent } = useAnalytics();
const lastStepIndex = selectedModuleSteps.length - 1;
const isValidForm = (): boolean => {

View File

@@ -3,13 +3,9 @@ import Uplot from 'components/Uplot';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useEffect, useMemo, useRef, useState } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { useMemo, useRef } from 'react';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { GlobalReducer } from 'types/reducer/globalTime';
import { getTimeRange } from 'utils/getTimeRange';
import { Container, ErrorText } from './styles';
@@ -35,21 +31,6 @@ function TimeSeriesView({
? graphRef.current.clientHeight
: 300;
const [minTimeScale, setMinTimeScale] = useState<number>();
const [maxTimeScale, setMaxTimeScale] = useState<number>();
const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector<
AppState,
GlobalReducer
>((state) => state.globalTime);
useEffect((): void => {
const { startTime, endTime } = getTimeRange();
setMinTimeScale(startTime);
setMaxTimeScale(endTime);
}, [maxTime, minTime, globalSelectedInterval, data]);
const chartOptions = getUPlotChartOptions({
yAxisUnit: yAxisUnit || '',
apiResponse: data?.payload,
@@ -58,8 +39,6 @@ function TimeSeriesView({
height,
},
isDarkMode,
minTimeScale,
maxTimeScale,
});
return (

View File

@@ -3,16 +3,12 @@ import { Button, Select as DefaultSelect } from 'antd';
import getLocalStorageKey from 'api/browser/localstorage/get';
import setLocalStorageKey from 'api/browser/localstorage/set';
import { LOCALSTORAGE } from 'constants/localStorage';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import dayjs, { Dayjs } from 'dayjs';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { updateStepInterval } from 'hooks/queryBuilder/useStepInterval';
import useUrlQuery from 'hooks/useUrlQuery';
import GetMinMax from 'lib/getMinMax';
import getTimeString from 'lib/getTimeString';
import history from 'lib/history';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useCallback, useEffect, useState } from 'react';
import { connect, useSelector } from 'react-redux';
import { RouteComponentProps, withRouter } from 'react-router-dom';
import { bindActionCreators, Dispatch } from 'redux';
@@ -38,9 +34,9 @@ function DateTimeSelection({
}: Props): JSX.Element {
const [formSelector] = Form.useForm();
const urlQuery = useUrlQuery();
const searchStartTime = urlQuery.get('startTime');
const searchEndTime = urlQuery.get('endTime');
const params = new URLSearchParams(location.search);
const searchStartTime = params.get('startTime');
const searchEndTime = params.get('endTime');
const localstorageStartTime = getLocalStorageKey('startTime');
const localstorageEndTime = getLocalStorageKey('endTime');
@@ -173,11 +169,6 @@ function DateTimeSelection({
return `Last refresh - ${secondsDiff} sec ago`;
}, [maxTime, minTime, selectedTime]);
const isLogsExplorerPage = useMemo(
() => location.pathname === ROUTES.LOGS_EXPLORER,
[location.pathname],
);
const onSelectHandler = (value: Time): void => {
if (value !== 'custom') {
updateTimeInterval(value);
@@ -190,18 +181,12 @@ function DateTimeSelection({
setCustomDTPickerVisible(true);
}
const { maxTime, minTime } = GetMinMax(value, getTime());
if (!isLogsExplorerPage) {
urlQuery.set(QueryParams.startTime, minTime.toString());
urlQuery.set(QueryParams.endTime, maxTime.toString());
const generatedUrl = `${location.pathname}?${urlQuery.toString()}`;
history.replace(generatedUrl);
}
if (!stagedQuery) {
return;
}
const { maxTime, minTime } = GetMinMax(value, getTime());
initQueryBuilderData(updateStepInterval(stagedQuery, maxTime, minTime));
};
@@ -222,12 +207,6 @@ function DateTimeSelection({
setLocalStorageKey('startTime', startTimeMoment.toString());
setLocalStorageKey('endTime', endTimeMoment.toString());
updateLocalStorageForRoutes('custom');
if (!isLogsExplorerPage) {
urlQuery.set(QueryParams.startTime, startTimeMoment.toString());
urlQuery.set(QueryParams.endTime, endTimeMoment.toString());
const generatedUrl = `${location.pathname}?${urlQuery.toString()}`;
history.replace(generatedUrl);
}
}
}
};
@@ -255,6 +234,7 @@ function DateTimeSelection({
if (searchEndTime !== null && searchStartTime !== null) {
return 'custom';
}
if (
(localstorageEndTime === null || localstorageStartTime === null) &&
time === 'custom'
@@ -272,8 +252,16 @@ function DateTimeSelection({
setRefreshButtonHidden(updatedTime === 'custom');
updateTimeInterval(updatedTime, [preStartTime, preEndTime]);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [location.pathname, updateTimeInterval, globalTimeLoading]);
}, [
location.pathname,
getTime,
localstorageEndTime,
localstorageStartTime,
searchEndTime,
searchStartTime,
updateTimeInterval,
globalTimeLoading,
]);
return (
<>

View File

@@ -3,7 +3,6 @@ import ROUTES from 'constants/routes';
import { useNotifications } from 'hooks/useNotifications';
import useUrlQuery from 'hooks/useUrlQuery';
import useUrlQueryData from 'hooks/useUrlQueryData';
import history from 'lib/history';
import {
MouseEventHandler,
useCallback,
@@ -29,27 +28,16 @@ export const useCopyLogLink = (logId?: string): UseCopyLogLink => {
(state) => state.globalTime,
);
const { queryData: timeRange } = useUrlQueryData<LogTimeRange | null>(
QueryParams.timeRange,
null,
);
const {
queryData: timeRange,
redirectWithQuery: onTimeRangeChange,
} = useUrlQueryData<LogTimeRange | null>(QueryParams.timeRange, null);
const { queryData: activeLogId } = useUrlQueryData<string | null>(
QueryParams.activeLogId,
null,
);
const onTimeRangeChange = useCallback(
(newTimeRange: LogTimeRange | null): void => {
urlQuery.set(QueryParams.timeRange, JSON.stringify(newTimeRange));
urlQuery.set(QueryParams.startTime, newTimeRange?.start.toString() || '');
urlQuery.set(QueryParams.endTime, newTimeRange?.end.toString() || '');
const generatedUrl = `${pathname}?${urlQuery.toString()}`;
history.replace(generatedUrl);
},
[pathname, urlQuery],
);
const isActiveLog = useMemo(() => activeLogId === logId, [activeLogId, logId]);
const [isHighlighted, setIsHighlighted] = useState<boolean>(isActiveLog);

View File

@@ -1 +0,0 @@
{"/en-GB/alerts":"37ea40b758e14f100b970178809147d7","/en-GB/channels":"b855a58fce92ff62a0ce50cc40d8da0b","/en-GB/common":"d918932fcd1d34b2d84cb463812bd157","/en-GB/dashboard":"9ec66badfc02995263cf108615f6380c","/en-GB/errorDetails":"a1a1ea54ed8adc720e7942c42ce4be0f","/en-GB/explorer":"98106bbc79e701d81f5731dd53a158f0","/en-GB/generalSettings":"65fca62d2f109d73fa4bdc447c353857","/en-GB/licenses":"dc2fea934c67b5b3bf8c940019d820cd","/en-GB/login":"c9d63ef04a9af5ae6aed12b4b725add5","/en-GB/logs":"de363f7feee26d9fc72eccdf69988f09","/en-GB/organizationsettings":"e24624bba7bdd7bf071873940742b1a8","/en-GB/routes":"08585a25257ed898131ba43e4c927d7e","/en-GB/rules":"f134663a0943cdb8cd2a2c169f27ba90","/en-GB/settings":"e2c4003664cc9ba476b658f1e6304fe5","/en-GB/signup":"59c64809b8b4c7b1b8902da5aa2315f0","/en-GB/titles":"9e0515203efab287fdd50afb96bde8c8","/en-GB/trace":"fcf3fda7bee8b609b5a3ab0f749f2594","/en-GB/traceDetails":"f91795f15c286f15bf630a454febb015","/en-GB/translation":"532cce878c691d9ff3c689a73279fd2e","/en/alerts":"37ea40b758e14f100b970178809147d7","/en/channels":"c9bfbd14bb4d3f38e2a669f5fbfadc17","/en/common":"d918932fcd1d34b2d84cb463812bd157","/en/dashboard":"6de8356a6ed53c109746c0f7ef37ffcf","/en/errorDetails":"eb83b35f49830420547f30c08bd88c4e","/en/explorer":"98106bbc79e701d81f5731dd53a158f0","/en/generalSettings":"65fca62d2f109d73fa4bdc447c353857","/en/licenses":"dc2fea934c67b5b3bf8c940019d820cd","/en/login":"c9d63ef04a9af5ae6aed12b4b725add5","/en/logs":"de363f7feee26d9fc72eccdf69988f09","/en/organizationsettings":"e24624bba7bdd7bf071873940742b1a8","/en/pipeline":"9f75c31214b2ae9d362bb6e5985c2e1f","/en/routes":"08585a25257ed898131ba43e4c927d7e","/en/rules":"f134663a0943cdb8cd2a2c169f27ba90","/en/settings":"ffabe7ca89d7992d9639695b4df4d6e9","/en/signup":"59c64809b8b4c7b1b8902da5aa2315f0","/en/titles":"49d542f8f3ca9291777b9042ed8faf1a","/en/trace":"fcf3fda7bee8b609b5a3ab0f749f2594","/en/traceDetails":"f91795f15c286f15bf630a454febb015","/en/translation":"921a0256c8d4d3522754557b41e24362","/en/valueGraph":"cc57d9b83919574016dab2fc9e5adedf"}

View File

@@ -20,13 +20,9 @@ export const getDashboardVariables = (
SIGNOZ_START_TIME: parseInt(start, 10) * 1e3,
SIGNOZ_END_TIME: parseInt(end, 10) * 1e3,
};
Object.entries(variables).forEach(([, value]) => {
if (value?.name) {
variablesTuple[value.name] = value?.selectedValue;
}
Object.keys(variables).forEach((key) => {
variablesTuple[key] = variables[key].selectedValue;
});
return variablesTuple;
} catch (e) {
console.error(e);

View File

@@ -1,4 +1,3 @@
/* eslint-disable no-param-reassign */
/* eslint-disable @typescript-eslint/ban-ts-comment */
// @ts-nocheck
/* eslint-disable sonarjs/cognitive-complexity */
@@ -16,7 +15,6 @@ import onClickPlugin, { OnClickPluginOpts } from './plugins/onClickPlugin';
import tooltipPlugin from './plugins/tooltipPlugin';
import getAxes from './utils/getAxes';
import getSeries from './utils/getSeriesData';
import { getXAxisScale } from './utils/getXAxisScale';
import { getYAxisScale } from './utils/getYAxisScale';
interface GetUPlotChartOptions {
@@ -33,8 +31,6 @@ interface GetUPlotChartOptions {
thresholdValue?: number;
thresholdText?: string;
fillSpans?: boolean;
minTimeScale?: number;
maxTimeScale?: number;
}
export const getUPlotChartOptions = ({
@@ -44,24 +40,21 @@ export const getUPlotChartOptions = ({
apiResponse,
onDragSelect,
yAxisUnit,
minTimeScale,
maxTimeScale,
onClickHandler = _noop,
graphsVisibilityStates,
setGraphsVisibilityStates,
thresholds,
fillSpans,
}: GetUPlotChartOptions): uPlot.Options => {
const timeScaleProps = getXAxisScale(minTimeScale, maxTimeScale);
return {
// eslint-disable-next-line sonarjs/prefer-immediate-return
const chartOptions = {
id,
width: dimensions.width,
height: dimensions.height - 30,
height: dimensions.height - 45,
// tzDate: (ts) => uPlot.tzDate(new Date(ts * 1e3), ''), // Pass timezone for 2nd param
legend: {
show: true,
live: false,
isolate: true,
},
focus: {
alpha: 0.3,
@@ -73,18 +66,18 @@ export const getUPlotChartOptions = ({
bias: 1,
},
points: {
size: (u, seriesIdx): number => u.series[seriesIdx].points.size * 3,
size: (u, seriesIdx): number => u.series[seriesIdx].points.size * 2.5,
width: (u, seriesIdx, size): number => size / 4,
stroke: (u, seriesIdx): string =>
`${u.series[seriesIdx].points.stroke(u, seriesIdx)}90`,
fill: (): string => '#fff',
},
},
padding: [16, 16, 8, 8],
padding: [16, 16, 16, 16],
scales: {
x: {
spanGaps: true,
...timeScaleProps,
time: true,
auto: true, // Automatically adjust scale range
},
y: {
...getYAxisScale(
@@ -165,24 +158,16 @@ export const getUPlotChartOptions = ({
(self): void => {
const legend = self.root.querySelector('.u-legend');
if (legend) {
const seriesEls = legend.querySelectorAll('.u-series');
const seriesEls = legend.querySelectorAll('.u-label');
const seriesArray = Array.from(seriesEls);
seriesArray.forEach((seriesEl, index) => {
seriesEl.addEventListener('click', () => {
if (graphsVisibilityStates) {
setGraphsVisibilityStates?.((prev) => {
const newGraphVisibilityStates = [...prev];
if (
newGraphVisibilityStates[index + 1] &&
newGraphVisibilityStates.every((value, i) =>
i === index + 1 ? value : !value,
)
) {
newGraphVisibilityStates.fill(true);
} else {
newGraphVisibilityStates.fill(false);
newGraphVisibilityStates[index + 1] = true;
}
newGraphVisibilityStates[index + 1] = !newGraphVisibilityStates[
index + 1
];
return newGraphVisibilityStates;
});
}
@@ -200,4 +185,6 @@ export const getUPlotChartOptions = ({
),
axes: getAxes(isDarkMode, yAxisUnit),
};
return chartOptions;
};

View File

@@ -54,7 +54,7 @@ const generateTooltipContent = (
const value = data[index][idx];
const label = getLabelName(metric, queryName || '', legend || '');
if (Number.isFinite(value)) {
if (value) {
const tooltipValue = getToolTipValue(value, yAxisUnit);
const dataObj = {
@@ -191,8 +191,7 @@ const tooltipPlugin = (
if (overlay) {
overlay.textContent = '';
const { left, top, idx } = u.cursor;
if (Number.isInteger(idx)) {
if (idx) {
const anchor = { left: left + bLeft, top: top + bTop };
const content = generateTooltipContent(
apiResult,

View File

@@ -9,7 +9,8 @@ const getAxes = (isDarkMode: boolean, yAxisUnit?: string): any => [
stroke: isDarkMode ? 'white' : 'black', // Color of the axis line
grid: {
stroke: getGridColor(isDarkMode), // Color of the grid lines
width: 0.2, // Width of the grid lines,
dash: [10, 10], // Dash pattern for grid lines,
width: 0.5, // Width of the grid lines,
show: true,
},
ticks: {
@@ -23,7 +24,8 @@ const getAxes = (isDarkMode: boolean, yAxisUnit?: string): any => [
stroke: isDarkMode ? 'white' : 'black', // Color of the axis line
grid: {
stroke: getGridColor(isDarkMode), // Color of the grid lines
width: 0.2, // Width of the grid lines
dash: [10, 10], // Dash pattern for grid lines,
width: 0.3, // Width of the grid lines
},
ticks: {
// stroke: isDarkMode ? 'white' : 'black', // Color of the tick lines

View File

@@ -1,8 +1,8 @@
const getGridColor = (isDarkMode: boolean): string => {
if (isDarkMode) {
return 'rgba(231,233,237,0.3)';
return 'rgba(231,233,237,0.2)';
}
return 'rgba(0,0,0,0.5)';
return 'rgba(231,233,237,0.8)';
};
export default getGridColor;

View File

@@ -46,22 +46,17 @@ const getSeries = (
legend || '',
);
const pointSize = seriesList[i].values.length > 1 ? 5 : 10;
const showPoints = !(seriesList[i].values.length > 1);
const seriesObj: any = {
width: 1.4,
paths,
drawStyle: drawStyles.line,
lineInterpolation: lineInterpolations.spline,
show: newGraphVisibilityStates ? newGraphVisibilityStates[i] : true,
label,
stroke: color,
width: 2,
spanGaps: true,
points: {
size: pointSize,
show: showPoints,
stroke: color,
show: false,
},
};

View File

@@ -1,40 +0,0 @@
function getFallbackMinMaxTimeStamp(): {
fallbackMin: number;
fallbackMax: number;
} {
const currentDate = new Date();
// Get the Unix timestamp (milliseconds since January 1, 1970)
const currentTime = currentDate.getTime();
const currentUnixTimestamp = Math.floor(currentTime / 1000);
// Calculate the date and time one day ago
const oneDayAgoUnixTimestamp = Math.floor(
(currentDate.getTime() - 86400000) / 1000,
); // 86400000 milliseconds in a day
return {
fallbackMin: oneDayAgoUnixTimestamp,
fallbackMax: currentUnixTimestamp,
};
}
export const getXAxisScale = (
minTimeScale: number,
maxTimeScale: number,
): {
time: boolean;
auto: boolean;
range?: [number, number];
} => {
let minTime = minTimeScale;
let maxTime = maxTimeScale;
if (!minTimeScale || !maxTimeScale) {
const { fallbackMin, fallbackMax } = getFallbackMinMaxTimeStamp();
minTime = fallbackMin;
maxTime = fallbackMax;
}
return { time: true, auto: false, range: [minTime, maxTime] };
};

View File

@@ -54,12 +54,7 @@ function getRange(
const [minSeriesValue, maxSeriesValue] = findMinMaxValues(series);
const min = Math.min(minThresholdValue, minSeriesValue);
let max = Math.max(maxThresholdValue, maxSeriesValue);
// This is a temporary change; we need a more generic way to handle ranges based on yAxisUnit
if (yAxisUnit === 'percentunit' && max < 1) {
max = 1;
}
const max = Math.max(maxThresholdValue, maxSeriesValue);
return [min, max];
}

View File

@@ -1,4 +1,4 @@
import { Modal } from 'antd';
import Modal from 'antd/es/modal';
import getDashboard from 'api/dashboard/get';
import lockDashboardApi from 'api/dashboard/lockDashboard';
import unlockDashboardApi from 'api/dashboard/unlockDashboard';
@@ -30,10 +30,9 @@ import { Dispatch } from 'redux';
import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import { UPDATE_TIME_INTERVAL } from 'types/actions/globalTime';
import { Dashboard, IDashboardVariable } from 'types/api/dashboard/getAll';
import { Dashboard } from 'types/api/dashboard/getAll';
import AppReducer from 'types/reducer/app';
import { GlobalReducer } from 'types/reducer/globalTime';
import { v4 as generateUUID } from 'uuid';
import { IDashboardContext } from './types';
@@ -103,44 +102,6 @@ export function DashboardProvider({
const { t } = useTranslation(['dashboard']);
const dashboardRef = useRef<Dashboard>();
// As we do not have order and ID fields in the variables object, we process the variables to add an order and ID where they are missing
// eslint-disable-next-line sonarjs/cognitive-complexity
const transformDashboardVariables = (data: Dashboard): Dashboard => {
if (data && data.data && data.data.variables) {
const clonedDashboardData = JSON.parse(JSON.stringify(data));
const { variables } = clonedDashboardData.data;
const existingOrders: Set<number> = new Set();
// eslint-disable-next-line no-restricted-syntax
for (const key in variables) {
// eslint-disable-next-line no-prototype-builtins
if (variables.hasOwnProperty(key)) {
const variable: IDashboardVariable = variables[key];
// Check if 'order' property doesn't exist or is undefined
if (variable.order === undefined) {
// Find a unique order starting from 0
let order = 0;
while (existingOrders.has(order)) {
order += 1;
}
variable.order = order;
existingOrders.add(order);
}
if (variable.id === undefined) {
variable.id = generateUUID();
}
}
}
return clonedDashboardData;
}
return data;
};
const dashboardResponse = useQuery(
[REACT_QUERY_KEY.DASHBOARD_BY_ID, isDashboardPage?.params],
{
@@ -151,27 +112,26 @@ export function DashboardProvider({
}),
refetchOnWindowFocus: false,
onSuccess: (data) => {
const updatedDashboardData = transformDashboardVariables(data);
const updatedDate = dayjs(updatedDashboardData.updated_at);
const updatedDate = dayjs(data.updated_at);
setIsDashboardLocked(updatedDashboardData?.isLocked || false);
setIsDashboardLocked(data?.isLocked || false);
// on first render
if (updatedTimeRef.current === null) {
setSelectedDashboard(updatedDashboardData);
setSelectedDashboard(data);
updatedTimeRef.current = updatedDate;
dashboardRef.current = updatedDashboardData;
dashboardRef.current = data;
setLayouts(getUpdatedLayout(updatedDashboardData.data.layout));
setLayouts(getUpdatedLayout(data.data.layout));
}
if (
updatedTimeRef.current !== null &&
updatedDate.isAfter(updatedTimeRef.current) &&
isVisible &&
dashboardRef.current?.id === updatedDashboardData.id
dashboardRef.current?.id === data.id
) {
// show modal when state is out of sync
const modal = onModal.confirm({
@@ -179,7 +139,7 @@ export function DashboardProvider({
title: t('dashboard_has_been_updated'),
content: t('do_you_want_to_refresh_the_dashboard'),
onOk() {
setSelectedDashboard(updatedDashboardData);
setSelectedDashboard(data);
const { maxTime, minTime } = getMinMax(
globalTime.selectedTime,
@@ -196,32 +156,32 @@ export function DashboardProvider({
},
});
dashboardRef.current = updatedDashboardData;
dashboardRef.current = data;
updatedTimeRef.current = dayjs(updatedDashboardData.updated_at);
updatedTimeRef.current = dayjs(data.updated_at);
setLayouts(getUpdatedLayout(updatedDashboardData.data.layout));
setLayouts(getUpdatedLayout(data.data.layout));
},
});
modalRef.current = modal;
} else {
// normal flow
updatedTimeRef.current = dayjs(updatedDashboardData.updated_at);
updatedTimeRef.current = dayjs(data.updated_at);
dashboardRef.current = updatedDashboardData;
dashboardRef.current = data;
if (!isEqual(selectedDashboard, updatedDashboardData)) {
setSelectedDashboard(updatedDashboardData);
if (!isEqual(selectedDashboard, data)) {
setSelectedDashboard(data);
}
if (
!isEqual(
[omitBy(layouts, (value): boolean => isUndefined(value))[0]],
updatedDashboardData.data.layout,
data.data.layout,
)
) {
setLayouts(getUpdatedLayout(updatedDashboardData.data.layout));
setLayouts(getUpdatedLayout(data.data.layout));
}
}
},

View File

@@ -14,8 +14,6 @@ export const VariableSortTypeArr = ['DISABLED', 'ASC', 'DESC'] as const;
export type TSortVariableValuesType = typeof VariableSortTypeArr[number];
export interface IDashboardVariable {
id: string;
order?: any;
name?: string; // key will be the source of truth
description: string;
type: TVariableQueryType;

View File

@@ -1,36 +0,0 @@
import getStartEndRangeTime from 'lib/getStartEndRangeTime';
import { UseQueryResult } from 'react-query';
import store from 'store';
import { SuccessResponse } from 'types/api';
import {
MetricRangePayloadProps,
QueryRangePayload,
} from 'types/api/metrics/getQueryRange';
export const getTimeRange = (
widgetQueryRange?: UseQueryResult<
SuccessResponse<MetricRangePayloadProps, unknown>,
Error
>,
): Record<string, number> => {
const widgetParams =
(widgetQueryRange?.data?.params as QueryRangePayload) || null;
if (widgetParams && widgetParams?.start && widgetParams?.end) {
return {
startTime: widgetParams.start / 1000,
endTime: widgetParams.end / 1000,
};
}
const { globalTime } = store.getState();
const { start: globalStartTime, end: globalEndTime } = getStartEndRangeTime({
type: 'GLOBAL_TIME',
interval: globalTime.selectedTime,
});
return {
startTime: (parseInt(globalStartTime, 10) * 1e3) / 1000,
endTime: (parseInt(globalEndTime, 10) * 1e3) / 1000,
};
};

View File

@@ -2346,45 +2346,6 @@
resolved "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz"
integrity sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==
"@dnd-kit/accessibility@^3.1.0":
version "3.1.0"
resolved "https://registry.yarnpkg.com/@dnd-kit/accessibility/-/accessibility-3.1.0.tgz#1054e19be276b5f1154ced7947fc0cb5d99192e0"
integrity sha512-ea7IkhKvlJUv9iSHJOnxinBcoOI3ppGnnL+VDJ75O45Nss6HtZd8IdN8touXPDtASfeI2T2LImb8VOZcL47wjQ==
dependencies:
tslib "^2.0.0"
"@dnd-kit/core@6.1.0":
version "6.1.0"
resolved "https://registry.yarnpkg.com/@dnd-kit/core/-/core-6.1.0.tgz#e81a3d10d9eca5d3b01cbf054171273a3fe01def"
integrity sha512-J3cQBClB4TVxwGo3KEjssGEXNJqGVWx17aRTZ1ob0FliR5IjYgTxl5YJbKTzA6IzrtelotH19v6y7uoIRUZPSg==
dependencies:
"@dnd-kit/accessibility" "^3.1.0"
"@dnd-kit/utilities" "^3.2.2"
tslib "^2.0.0"
"@dnd-kit/modifiers@7.0.0":
version "7.0.0"
resolved "https://registry.yarnpkg.com/@dnd-kit/modifiers/-/modifiers-7.0.0.tgz#229666dd4e8b9487f348035117f993af755b3db9"
integrity sha512-BG/ETy3eBjFap7+zIti53f0PCLGDzNXyTmn6fSdrudORf+OH04MxrW4p5+mPu4mgMk9kM41iYONjc3DOUWTcfg==
dependencies:
"@dnd-kit/utilities" "^3.2.2"
tslib "^2.0.0"
"@dnd-kit/sortable@8.0.0":
version "8.0.0"
resolved "https://registry.yarnpkg.com/@dnd-kit/sortable/-/sortable-8.0.0.tgz#086b7ac6723d4618a4ccb6f0227406d8a8862a96"
integrity sha512-U3jk5ebVXe1Lr7c2wU7SBZjcWdQP+j7peHJfCspnA81enlu88Mgd7CC8Q+pub9ubP7eKVETzJW+IBAhsqbSu/g==
dependencies:
"@dnd-kit/utilities" "^3.2.2"
tslib "^2.0.0"
"@dnd-kit/utilities@^3.2.2":
version "3.2.2"
resolved "https://registry.yarnpkg.com/@dnd-kit/utilities/-/utilities-3.2.2.tgz#5a32b6af356dc5f74d61b37d6f7129a4040ced7b"
integrity sha512-+MKAJEOfaBe5SmV6t34p80MMKhjvUz0vRrvVJbPT0WElzaOJ/1xs+D+KDv+tD/NE5ujfrChEcshd4fLn0wpiqg==
dependencies:
tslib "^2.0.0"
"@emotion/hash@^0.8.0":
version "0.8.0"
resolved "https://registry.npmjs.org/@emotion/hash/-/hash-0.8.0.tgz"
@@ -14860,11 +14821,6 @@ tslib@^1.8.1, tslib@^1.9.0:
resolved "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz"
integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==
tslib@^2.0.0:
version "2.6.2"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae"
integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==
tsutils@^3.21.0:
version "3.21.0"
resolved "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz"

2
go.mod
View File

@@ -5,7 +5,7 @@ go 1.21
require (
github.com/ClickHouse/clickhouse-go/v2 v2.15.0
github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb
github.com/SigNoz/signoz-otel-collector v0.88.3
github.com/SigNoz/signoz-otel-collector v0.88.1
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974
github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230822164844-1b861a431974
github.com/antonmedv/expr v1.15.3

4
go.sum
View File

@@ -98,8 +98,8 @@ github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb h1:bneLSKPf9YUSFm
github.com/SigNoz/govaluate v0.0.0-20220522085550-d19c08c206cb/go.mod h1:JznGDNg9x1cujDKa22RaQOimOvvEfy3nxzDGd8XDgmA=
github.com/SigNoz/prometheus v1.9.78 h1:bB3yuDrRzi/Mv00kWayR9DZbyjTuGfendSqISyDcXiY=
github.com/SigNoz/prometheus v1.9.78/go.mod h1:MffmFu2qFILQrOHehx3D0XjYtaZMVfI+Ppeiv98x4Ww=
github.com/SigNoz/signoz-otel-collector v0.88.3 h1:30sEJZmCQjfjo8CZGxqXKZkWE7Zij9TeS1uUqNFEZRU=
github.com/SigNoz/signoz-otel-collector v0.88.3/go.mod h1:KyEc6JSFS6f8Nw3UdSm4aGDGucEpQYZUdYwjvY8uMVc=
github.com/SigNoz/signoz-otel-collector v0.88.1 h1:Xeu6Kn8VA0g6it60PMIAclayYSIogBq0rnkodlpxllI=
github.com/SigNoz/signoz-otel-collector v0.88.1/go.mod h1:KyEc6JSFS6f8Nw3UdSm4aGDGucEpQYZUdYwjvY8uMVc=
github.com/SigNoz/zap_otlp v0.1.0 h1:T7rRcFN87GavY8lDGZj0Z3Xv6OhJA6Pj3I9dNPmqvRc=
github.com/SigNoz/zap_otlp v0.1.0/go.mod h1:lcHvbDbRgvDnPxo9lDlaL1JK2PyOyouP/C3ynnYIvyo=
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 h1:PKVgdf83Yw+lZJbFtNGBgqXiXNf3+kOXW2qZ7Ms7OaY=

View File

@@ -43,7 +43,6 @@ import (
promModel "github.com/prometheus/common/model"
"go.uber.org/zap"
"go.signoz.io/signoz/pkg/query-service/app/dashboards"
"go.signoz.io/signoz/pkg/query-service/app/logs"
"go.signoz.io/signoz/pkg/query-service/app/services"
"go.signoz.io/signoz/pkg/query-service/auth"
@@ -52,7 +51,6 @@ import (
"go.signoz.io/signoz/pkg/query-service/interfaces"
"go.signoz.io/signoz/pkg/query-service/model"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/query-service/rules"
"go.signoz.io/signoz/pkg/query-service/telemetry"
"go.signoz.io/signoz/pkg/query-service/utils"
)
@@ -3423,100 +3421,6 @@ func (r *ClickHouseReader) GetTagsInfoInLastHeartBeatInterval(ctx context.Contex
return &tagsInfo, nil
}
// GetDashboardsInfo returns analytics data for dashboards
func (r *ClickHouseReader) GetDashboardsInfo(ctx context.Context) (*model.DashboardsInfo, error) {
dashboardsInfo := model.DashboardsInfo{}
// fetch dashboards from dashboard db
query := "SELECT data FROM dashboards"
var dashboardsData []dashboards.Dashboard
err := r.localDB.Select(&dashboardsData, query)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
return &dashboardsInfo, err
}
for _, dashboard := range dashboardsData {
dashboardsInfo = countPanelsInDashboard(dashboard.Data)
}
dashboardsInfo.TotalDashboards = len(dashboardsData)
return &dashboardsInfo, nil
}
func countPanelsInDashboard(data map[string]interface{}) model.DashboardsInfo {
var logsPanelCount, tracesPanelCount, metricsPanelCount int
// totalPanels := 0
if data != nil && data["widgets"] != nil {
widgets, ok := data["widgets"].(interface{})
if ok {
data, ok := widgets.([]interface{})
if ok {
for _, widget := range data {
sData, ok := widget.(map[string]interface{})
if ok && sData["query"] != nil {
// totalPanels++
query, ok := sData["query"].(interface{}).(map[string]interface{})
if ok && query["queryType"] == "builder" && query["builder"] != nil {
builderData, ok := query["builder"].(interface{}).(map[string]interface{})
if ok && builderData["queryData"] != nil {
builderQueryData, ok := builderData["queryData"].([]interface{})
if ok {
for _, queryData := range builderQueryData {
data, ok := queryData.(map[string]interface{})
if ok {
if data["dataSource"] == "traces" {
tracesPanelCount++
} else if data["dataSource"] == "metrics" {
metricsPanelCount++
} else if data["dataSource"] == "logs" {
logsPanelCount++
}
}
}
}
}
}
}
}
}
}
}
return model.DashboardsInfo{
LogsBasedPanels: logsPanelCount,
TracesBasedPanels: tracesPanelCount,
MetricBasedPanels: metricsPanelCount,
}
}
func (r *ClickHouseReader) GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error) {
alertsInfo := model.AlertsInfo{}
// fetch alerts from rules db
query := "SELECT data FROM rules"
var alertsData []string
err := r.localDB.Select(&alertsData, query)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
return &alertsInfo, err
}
for _, alert := range alertsData {
var rule rules.GettableRule
err = json.Unmarshal([]byte(alert), &rule)
if err != nil {
zap.S().Errorf("msg:", "invalid rule data", "\t err:", err)
continue
}
if rule.AlertType == "LOGS_BASED_ALERT" {
alertsInfo.LogsBasedAlerts = alertsInfo.LogsBasedAlerts + 1
} else if rule.AlertType == "METRIC_BASED_ALERT" {
alertsInfo.MetricBasedAlerts = alertsInfo.MetricBasedAlerts + 1
} else if rule.AlertType == "TRACES_BASED_ALERT" {
alertsInfo.TracesBasedAlerts = alertsInfo.TracesBasedAlerts + 1
}
alertsInfo.TotalAlerts = alertsInfo.TotalAlerts + 1
}
return &alertsInfo, nil
}
func (r *ClickHouseReader) GetLogFields(ctx context.Context) (*model.GetFieldsResponse, *model.ApiError) {
// response will contain top level fields from the otel log model
response := model.GetFieldsResponse{

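For orientation, here is a minimal sketch of the dashboard data shape that countPanelsInDashboard (shown above) walks — widgets → query → builder → queryData → dataSource. The payload values are made up for illustration.

package main

import "fmt"

func main() {
	// A stripped-down dashboard payload with one logs panel and one metrics panel.
	data := map[string]interface{}{
		"widgets": []interface{}{
			map[string]interface{}{
				"query": map[string]interface{}{
					"queryType": "builder",
					"builder": map[string]interface{}{
						"queryData": []interface{}{
							map[string]interface{}{"dataSource": "logs"},
						},
					},
				},
			},
			map[string]interface{}{
				"query": map[string]interface{}{
					"queryType": "builder",
					"builder": map[string]interface{}{
						"queryData": []interface{}{
							map[string]interface{}{"dataSource": "metrics"},
						},
					},
				},
			},
		},
	}
	// Fed to countPanelsInDashboard, this would count 1 logs-based and 1 metric-based panel.
	fmt.Println("widgets:", len(data["widgets"].([]interface{})))
}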
View File

@@ -483,10 +483,8 @@ func isOrderByTs(orderBy []v3.OrderBy) bool {
func PrepareLogsQuery(start, end int64, queryType v3.QueryType, panelType v3.PanelType, mq *v3.BuilderQuery, options Options) (string, error) {
// adjust the start and end time to the step interval
if panelType != v3.PanelTypeList {
start = start - (start % (mq.StepInterval * 1000))
end = end - (end % (mq.StepInterval * 1000))
}
start = start - (start % (mq.StepInterval * 1000))
end = end - (end % (mq.StepInterval * 1000))
if options.IsLivetailQuery {
query, err := buildLogsLiveTailQuery(mq)

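The step-interval alignment in PrepareLogsQuery above floors both start and end to the step boundary. A minimal sketch, assuming millisecond timestamps and a step interval in seconds (the helper name alignToStep is ours; the timestamps match the ones appearing in the logs test expectations further below):

package main

import "fmt"

// alignToStep floors a millisecond timestamp to the nearest multiple of the
// step interval (given in seconds), mirroring the adjustment in PrepareLogsQuery.
func alignToStep(tsMs, stepSeconds int64) int64 {
	return tsMs - (tsMs % (stepSeconds * 1000))
}

func main() {
	start, end, step := int64(1680066360726), int64(1680066458000), int64(60)
	fmt.Println(alignToStep(start, step)) // 1680066360000
	fmt.Println(alignToStep(end, step))   // 1680066420000
}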
View File

@@ -1353,7 +1353,7 @@ var testPrepLogsQueryLimitOffsetData = []struct {
PageSize: 5,
},
TableName: "logs",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) order by timestamp desc LIMIT 1",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360000000000 AND timestamp <= 1680066420000000000) order by timestamp desc LIMIT 1",
},
{
Name: "Test limit greater than pageSize - order by ts",
@@ -1374,7 +1374,7 @@ var testPrepLogsQueryLimitOffsetData = []struct {
PageSize: 10,
},
TableName: "logs",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by timestamp desc LIMIT 10",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360000000000 AND timestamp <= 1680066420000000000) AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by timestamp desc LIMIT 10",
},
{
Name: "Test limit less than pageSize - order by custom",
@@ -1393,7 +1393,7 @@ var testPrepLogsQueryLimitOffsetData = []struct {
PageSize: 5,
},
TableName: "logs",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) order by attributes_string_value[indexOf(attributes_string_key, 'method')] desc LIMIT 1 OFFSET 0",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360000000000 AND timestamp <= 1680066420000000000) order by attributes_string_value[indexOf(attributes_string_key, 'method')] desc LIMIT 1 OFFSET 0",
},
{
Name: "Test limit greater than pageSize - order by custom",
@@ -1414,7 +1414,7 @@ var testPrepLogsQueryLimitOffsetData = []struct {
PageSize: 50,
},
TableName: "logs",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by attributes_string_value[indexOf(attributes_string_key, 'method')] desc LIMIT 50 OFFSET 50",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360000000000 AND timestamp <= 1680066420000000000) AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by attributes_string_value[indexOf(attributes_string_key, 'method')] desc LIMIT 50 OFFSET 50",
},
}

View File

@@ -1,57 +0,0 @@
package cumulative
import (
"fmt"
"strings"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)
// groupingSets returns a string of comma separated tags for group by clause
// `ts` is always added to the group by clause
func groupingSets(tags ...string) string {
withTs := append(tags, "ts")
return fmt.Sprintf(`GROUPING SETS ( (%s), (%s) )`, strings.Join(withTs, ", "), strings.Join(tags, ", "))
}
// groupingSetsByAttributeKeyTags returns a string of comma separated tags for group by clause
func groupingSetsByAttributeKeyTags(tags ...v3.AttributeKey) string {
groupTags := []string{}
for _, tag := range tags {
groupTags = append(groupTags, tag.Key)
}
return groupingSets(groupTags...)
}
// groupByAttributeKeyTags returns a string of comma-separated tags for the group by clause; `ts` is always appended
func groupByAttributeKeyTags(tags ...v3.AttributeKey) string {
groupTags := []string{}
for _, tag := range tags {
groupTags = append(groupTags, tag.Key)
}
groupTags = append(groupTags, "ts")
return strings.Join(groupTags, ", ")
}
// orderBy returns a string of comma separated tags for order by clause
// if the order is not specified, it defaults to ASC
func orderByAttributeKeyTags(items []v3.OrderBy, tags []v3.AttributeKey) string {
var orderBy []string
for _, tag := range tags {
found := false
for _, item := range items {
if item.ColumnName == tag.Key {
found = true
orderBy = append(orderBy, fmt.Sprintf("%s %s", item.ColumnName, item.Order))
break
}
}
if !found {
orderBy = append(orderBy, fmt.Sprintf("%s ASC", tag.Key))
}
}
orderBy = append(orderBy, "ts ASC")
return strings.Join(orderBy, ", ")
}

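As a usage sketch of the helpers shown above (not part of the original test suite), here is what they produce for a single service_name group-by key; the resulting strings match the GROUP BY and ORDER BY clauses visible in the expected queries of the timeseries tests further below.

package cumulative

import (
	"fmt"
	"testing"

	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

// TestGroupingHelperOutputs is an illustrative sketch, not an original test.
func TestGroupingHelperOutputs(t *testing.T) {
	groupBy := []v3.AttributeKey{{Key: "service_name"}}

	fmt.Println(groupingSetsByAttributeKeyTags(groupBy...)) // GROUPING SETS ( (service_name, ts), (service_name) )
	fmt.Println(groupByAttributeKeyTags(groupBy...))        // service_name, ts
	fmt.Println(orderByAttributeKeyTags(nil, groupBy))      // service_name ASC, ts ASC
}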
View File

@@ -1,220 +0,0 @@
package cumulative
import (
"fmt"
v4 "go.signoz.io/signoz/pkg/query-service/app/metrics/v4"
"go.signoz.io/signoz/pkg/query-service/constants"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/query-service/utils"
)
// See https://clickhouse.com/docs/en/sql-reference/window-functions for more details on `lagInFrame` function
//
// Calculating the rate of change of a metric is a common use case.
// Requests and errors are two examples of metrics that are often expressed as a rate of change.
// The rate of change is the difference between the current value and the previous value divided by
// the time difference between the current and previous values (i.e. the time interval).
//
// The value of a cumulative counter always increases. However, the rate of change can be negative
// if the value decreases between two samples. This can happen if the counter is reset when the
// application restarts or if the counter is reset manually. In this case, the rate of change is
// not meaningful and should be ignored.
//
// The condition `(per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0`
// checks if the rate of change is negative. If it is negative, the value is replaced with `nan`.
//
// The condition `ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400` checks
// if the time difference between the current and previous values is greater than or equal to 1 day.
// The first sample of a metric is always `nan` because there is no previous value to compare it to.
// When the first sample is encountered, the previous value for the time is set to default i.e `1970-01-01`.
// Since any difference between the first sample timestamp and the previous value timestamp will be
// greater than or equal to 1 day, the rate of change for the first sample will be `nan`.
//
// If neither of the above conditions are true, the rate of change is calculated as
// `(per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window)`
// where `rate_window` is a window function that partitions the data by fingerprint and orders it by timestamp.
// We want to calculate the rate of change for each time series, so we partition the data by fingerprint.
//
// The `increase` function is similar to the `rate` function, except that it does not divide by the time interval.
const (
rateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window)))`
increaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window)))`
)
// prepareTimeAggregationSubQueryTimeSeries prepares the sub-query to be used for temporal aggregation
// of time series data
// The following example illustrates how the sub-query is used to calculate the sum of values for each
// time series in a 15-second interval:
// ```
// timestamp 01.00 01.05 01.10 01.15 01.20 01.25 01.30 01.35 01.40
// +------+------+------+------+------+------+------+------+------+
// | | | | | | | | | |
// | v1 | v2 | v3 | v4 | v5 | v6 | v7 | v8 | v9 |
// | | | | | | | | | |
// +------+------+------+------+------+------+------+------+------+
// | | | | | | | | |
// | | | | | | | | |
// | | |
// +------+ +------+ +------+
// | v1+ | | v4+ | | v7+ |
// | v2+ | | v5+ | | v8+ |
// | v3 | | v6 | | v9 |
// +------+ +------+ +------+
// 01.00 01.15 01.30
// ```
// Calculating the rate/increase involves an additional step. We first calculate the maximum value for each time series
// in a 15 seconds interval. Then, we calculate the difference between the current maximum value and the previous
// maximum value
// The following example illustrates how the sub-query is used to calculate the rate of change for each time series
// in a 15-second interval:
// ```
// timestamp 01.00 01.05 01.10 01.15 01.20 01.25 01.30 01.35 01.40
// +------+------+------+------+------+------+------+------+------+
// | | | | | | | | | |
// | v1 | v2 | v3 | v4 | v5 | v6 | v7 | v8 | v9 |
// | | | | | | | | | |
// +------+------+------+------+------+------+------+------+------+
// | | | | | | | | |
// | | | | | | | | |
// | | |
// +------+ +------+ +------+
// max(| v1, | max(| v4, | max(| v7, |
// | v2, | | v5, | | v8, |
// | v3 |) | v6 |) | v9 |)
// +------+ +------+ +------+
// 01.00 01.15 01.30
// +-------+ +--------+
// | V6-V2 | | V9-V6 |
// | | | |
// | | | |
// +------+ +--------+
// 01.00 01.15
// ```
// The rate of change is calculated as (Vy - Vx) / (Ty - Tx) where Vx and Vy are the values at time Tx and Ty respectively.
// In an ideal scenario, the last value of each interval could be used to calculate the rate of change. Instead, we use
// the maximum value of each interval to calculate the rate of change. This is because any process restart can cause the
// value to be reset to 0. This will produce an inaccurate result. The max is the best approximation we can get.
// We don't expect the process to restart very often, so this should be a good approximation.
func prepareTimeAggregationSubQueryTimeSeries(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
var subQuery string
timeSeriesSubQuery, err := v4.PrepareTimeseriesFilterQuery(mq)
if err != nil {
return "", err
}
samplesTableFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms <= %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end)
// Select the aggregate value for interval
queryTmpl :=
"SELECT fingerprint, %s" +
" toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL %d SECOND) as ts," +
" %s as per_series_value" +
" FROM " + constants.SIGNOZ_METRIC_DBNAME + "." + constants.SIGNOZ_SAMPLES_TABLENAME +
" INNER JOIN" +
" (%s) as filtered_time_series" +
" USING fingerprint" +
" WHERE " + samplesTableFilter +
" GROUP BY fingerprint, ts" +
" ORDER BY fingerprint, ts"
var selectLabelsAny string
for _, tag := range mq.GroupBy {
selectLabelsAny += fmt.Sprintf("any(%s) as %s,", tag.Key, tag.Key)
}
var selectLabels string
for _, tag := range mq.GroupBy {
selectLabels += tag.Key + ","
}
switch mq.TimeAggregation {
case v3.TimeAggregationAvg:
op := "avg(value)"
subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
case v3.TimeAggregationSum:
op := "sum(value)"
subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
case v3.TimeAggregationMin:
op := "min(value)"
subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
case v3.TimeAggregationMax:
op := "max(value)"
subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
case v3.TimeAggregationCount:
op := "count(value)"
subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
case v3.TimeAggregationCountDistinct:
op := "count(distinct(value))"
subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
case v3.TimeAggregationAnyLast:
op := "anyLast(value)"
subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
case v3.TimeAggregationRate:
op := "max(value)"
innerSubQuery := fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
rateQueryTmpl :=
"SELECT %s ts, " + rateWithoutNegative +
" as per_series_value FROM (%s) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)"
subQuery = fmt.Sprintf(rateQueryTmpl, selectLabels, innerSubQuery)
case v3.TimeAggregationIncrease:
op := "max(value)"
innerSubQuery := fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
rateQueryTmpl :=
"SELECT %s ts, " + increaseWithoutNegative +
" as per_series_value FROM (%s) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)"
subQuery = fmt.Sprintf(rateQueryTmpl, selectLabels, innerSubQuery)
}
return subQuery, nil
}
// prepareMetricQueryCumulativeTimeSeries prepares the query to be used for fetching metrics
func prepareMetricQueryCumulativeTimeSeries(start, end, step int64, mq *v3.BuilderQuery) (string, error) {
var query string
temporalAggSubQuery, err := prepareTimeAggregationSubQueryTimeSeries(start, end, step, mq)
if err != nil {
return "", err
}
groupBy := groupingSetsByAttributeKeyTags(mq.GroupBy...)
orderBy := orderByAttributeKeyTags(mq.OrderBy, mq.GroupBy)
selectLabels := groupByAttributeKeyTags(mq.GroupBy...)
queryTmpl :=
"SELECT %s," +
" %s as value" +
" FROM (%s)" +
" WHERE isNaN(per_series_value) = 0" +
" GROUP BY %s" +
" ORDER BY %s"
switch mq.SpaceAggregation {
case v3.SpaceAggregationAvg:
op := "avg(per_series_value)"
query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
case v3.SpaceAggregationSum:
op := "sum(per_series_value)"
query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
case v3.SpaceAggregationMin:
op := "min(per_series_value)"
query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
case v3.SpaceAggregationMax:
op := "max(per_series_value)"
query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
case v3.SpaceAggregationCount:
op := "count(per_series_value)"
query = fmt.Sprintf(queryTmpl, selectLabels, op, temporalAggSubQuery, groupBy, orderBy)
}
return query, nil
}

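The rate-without-negative rule documented in the long comment above can be sketched in plain Go. The sample values, including the counter reset between the second and third points, are invented for illustration; the real computation happens in ClickHouse via lagInFrame over rate_window.

package main

import (
	"fmt"
	"math"
)

type sample struct {
	ts    int64   // unix seconds
	value float64 // cumulative counter value
}

// rateWithoutNegative mirrors the SQL expression: a negative delta (counter reset)
// or a gap of >= 1 day yields NaN instead of a misleading rate.
func rateWithoutNegative(prev, cur sample) float64 {
	dv := cur.value - prev.value
	dt := float64(cur.ts - prev.ts)
	if dv < 0 || dt >= 86400 {
		return math.NaN()
	}
	return dv / dt
}

func main() {
	samples := []sample{{60, 10}, {120, 40}, {180, 5}, {240, 35}}
	for i := 1; i < len(samples); i++ {
		fmt.Println(rateWithoutNegative(samples[i-1], samples[i])) // 0.5, NaN, 0.5
	}
}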
View File

@@ -1,229 +0,0 @@
package cumulative
import (
"testing"
"github.com/stretchr/testify/assert"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)
func TestPrepareTimeAggregationSubQuery(t *testing.T) {
// The time aggregation is performed for each unique series - since the fingerprint represents the
// unique hash of label set, we always group by fingerprint regardless of the GroupBy
// This sub result is then aggregated on dimensions using the provided GroupBy clause keys
testCases := []struct {
name string
builderQuery *v3.BuilderQuery
start int64
end int64
expectedQueryContains string
}{
{
name: "test time aggregation = avg, temporality = cumulative",
builderQuery: &v3.BuilderQuery{
QueryName: "A",
StepInterval: 60,
DataSource: v3.DataSourceMetrics,
AggregateAttribute: v3.AttributeKey{
Key: "http_requests",
DataType: v3.AttributeKeyDataTypeFloat64,
Type: v3.AttributeKeyTypeUnspecified,
IsColumn: true,
IsJSON: false,
},
Temporality: v3.Cumulative,
Filters: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{
Key: v3.AttributeKey{
Key: "service_name",
Type: v3.AttributeKeyTypeTag,
DataType: v3.AttributeKeyDataTypeString,
},
Operator: v3.FilterOperatorNotEqual,
Value: "payment_service",
},
{
Key: v3.AttributeKey{
Key: "endpoint",
Type: v3.AttributeKeyTypeTag,
DataType: v3.AttributeKeyDataTypeString,
},
Operator: v3.FilterOperatorIn,
Value: []interface{}{"/paycallback", "/payme", "/paypal"},
},
},
},
GroupBy: []v3.AttributeKey{{
Key: "service_name",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeTag,
}},
Expression: "A",
Disabled: false,
TimeAggregation: v3.TimeAggregationAvg,
},
start: 1701794980000,
end: 1701796780000,
expectedQueryContains: "SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND JSONExtractString(labels, 'service_name') != 'payment_service' AND JSONExtractString(labels, 'endpoint') IN ['/paycallback','/payme','/paypal']) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts",
},
{
name: "test time aggregation = rate, temporality = cumulative",
builderQuery: &v3.BuilderQuery{
QueryName: "A",
StepInterval: 60,
DataSource: v3.DataSourceMetrics,
AggregateAttribute: v3.AttributeKey{
Key: "http_requests",
DataType: v3.AttributeKeyDataTypeFloat64,
Type: v3.AttributeKeyTypeUnspecified,
IsColumn: true,
IsJSON: false,
},
Temporality: v3.Cumulative,
Filters: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{
Key: v3.AttributeKey{
Key: "service_name",
Type: v3.AttributeKeyTypeTag,
DataType: v3.AttributeKeyDataTypeString,
},
Operator: v3.FilterOperatorContains,
Value: "payment_service",
},
},
},
GroupBy: []v3.AttributeKey{{
Key: "service_name",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeTag,
}},
Expression: "A",
Disabled: false,
TimeAggregation: v3.TimeAggregationRate,
},
start: 1701794980000,
end: 1701796780000,
expectedQueryContains: "SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)",
},
}
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
query, err := prepareTimeAggregationSubQueryTimeSeries(
testCase.start,
testCase.end,
testCase.builderQuery.StepInterval,
testCase.builderQuery,
)
assert.Nil(t, err)
assert.Contains(t, query, testCase.expectedQueryContains)
})
}
}
func TestPrepareTimeseriesQuery(t *testing.T) {
testCases := []struct {
name string
builderQuery *v3.BuilderQuery
start int64
end int64
expectedQueryContains string
}{
{
name: "test time aggregation = avg, space aggregation = sum, temporality = unspecified",
builderQuery: &v3.BuilderQuery{
QueryName: "A",
StepInterval: 60,
DataSource: v3.DataSourceMetrics,
AggregateAttribute: v3.AttributeKey{
Key: "system_memory_usage",
DataType: v3.AttributeKeyDataTypeFloat64,
Type: v3.AttributeKeyTypeUnspecified,
IsColumn: true,
IsJSON: false,
},
Temporality: v3.Unspecified,
Filters: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{
Key: v3.AttributeKey{
Key: "state",
Type: v3.AttributeKeyTypeTag,
DataType: v3.AttributeKeyDataTypeString,
},
Operator: v3.FilterOperatorNotEqual,
Value: "idle",
},
},
},
GroupBy: []v3.AttributeKey{},
Expression: "A",
Disabled: false,
TimeAggregation: v3.TimeAggregationAvg,
SpaceAggregation: v3.SpaceAggregationSum,
},
start: 1701794980000,
end: 1701796780000,
expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'system_memory_usage' AND temporality = 'Unspecified' AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name = 'system_memory_usage' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (ts), () ) ORDER BY ts ASC",
},
{
name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative",
builderQuery: &v3.BuilderQuery{
QueryName: "A",
StepInterval: 60,
DataSource: v3.DataSourceMetrics,
AggregateAttribute: v3.AttributeKey{
Key: "http_requests",
DataType: v3.AttributeKeyDataTypeFloat64,
Type: v3.AttributeKeyTypeUnspecified,
IsColumn: true,
IsJSON: false,
},
Temporality: v3.Cumulative,
Filters: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{
Key: v3.AttributeKey{
Key: "service_name",
Type: v3.AttributeKeyTypeTag,
DataType: v3.AttributeKeyDataTypeString,
},
Operator: v3.FilterOperatorContains,
Value: "payment_service",
},
},
},
GroupBy: []v3.AttributeKey{{
Key: "service_name",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeTag,
}},
Expression: "A",
Disabled: false,
TimeAggregation: v3.TimeAggregationRate,
SpaceAggregation: v3.SpaceAggregationSum,
},
start: 1701794980000,
end: 1701796780000,
expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'http_requests' AND temporality = 'Cumulative' AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name = 'http_requests' AND timestamp_ms >= 1701794980000 AND timestamp_ms <= 1701796780000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY GROUPING SETS ( (service_name, ts), (service_name) ) ORDER BY service_name ASC, ts ASC",
},
}
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
query, err := prepareMetricQueryCumulativeTimeSeries(
testCase.start,
testCase.end,
testCase.builderQuery.StepInterval,
testCase.builderQuery,
)
assert.Nil(t, err)
assert.Contains(t, query, testCase.expectedQueryContains)
})
}
}

View File

@@ -1021,6 +1021,13 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
queryRangeParams.Start = queryRangeParams.End
}
// round the end down to the nearest multiple of the step
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
end := (queryRangeParams.End) / 1000
step := queryRangeParams.Step
queryRangeParams.End = (end / step * step) * 1000
}
// replace go template variables in clickhouse query
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeClickHouseSQL {
for _, chQuery := range queryRangeParams.CompositeQuery.ClickHouseQueries {

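A worked example of the end-time alignment above, assuming End is in milliseconds and Step in seconds (the concrete values are illustrative): integer division floors the end to the previous step boundary.

package main

import "fmt"

func main() {
	var end int64 = 1701796780123 // queryRangeParams.End in ms
	var step int64 = 60           // queryRangeParams.Step in seconds

	endSec := end / 1000                     // 1701796780
	aligned := (endSec / step * step) * 1000 // floor to the step boundary, back to ms
	fmt.Println(aligned)                     // 1701796740000
}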
View File

@@ -417,7 +417,7 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
}
// if telemetry.GetInstance().IsSampled() {
if _, ok := telemetry.EnabledPaths()[path]; ok {
if _, ok := telemetry.IgnoredPaths()[path]; !ok {
userEmail, err := auth.GetEmailFromJwt(r.Context())
if err == nil {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data, userEmail)

View File

@@ -71,8 +71,6 @@ type Reader interface {
GetListResultV3(ctx context.Context, query string) ([]*v3.Row, error)
LiveTailLogsV3(ctx context.Context, query string, timestampStart uint64, idStart string, client *v3.LogsLiveTailClient)
GetDashboardsInfo(ctx context.Context) (*model.DashboardsInfo, error)
GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error)
GetTotalSpans(ctx context.Context) (uint64, error)
GetSpansInLastHeartBeatInterval(ctx context.Context) (uint64, error)
GetTimeSeriesInfo(ctx context.Context) (map[string]interface{}, error)

View File

@@ -55,14 +55,14 @@ var BasicPlan = FeatureSet{
Name: QueryBuilderPanels,
Active: true,
Usage: 0,
UsageLimit: 20,
UsageLimit: 5,
Route: "",
},
Feature{
Name: QueryBuilderAlerts,
Active: true,
Usage: 0,
UsageLimit: 10,
UsageLimit: 5,
Route: "",
},
Feature{

View File

@@ -615,20 +615,6 @@ type TagsInfo struct {
Env string `json:"env"`
}
type AlertsInfo struct {
TotalAlerts int `json:"totalAlerts"`
LogsBasedAlerts int `json:"logsBasedAlerts"`
MetricBasedAlerts int `json:"metricBasedAlerts"`
TracesBasedAlerts int `json:"tracesBasedAlerts"`
}
type DashboardsInfo struct {
TotalDashboards int `json:"totalDashboards"`
LogsBasedPanels int `json:"logsBasedPanels"`
MetricBasedPanels int `json:"metricBasedPanels"`
TracesBasedPanels int `json:"tracesBasedPanels"`
}
type TagTelemetryData struct {
ServiceName string `json:"serviceName" ch:"serviceName"`
Env string `json:"env" ch:"env"`

View File

@@ -14,7 +14,7 @@ import (
func TestThresholdRuleCombinations(t *testing.T) {
postableRule := PostableRule{
Alert: "Tricky Condition Tests",
AlertType: "METRIC_BASED_ALERT",
AlertType: "METRICS_BASED_ALERT",
RuleType: RuleTypeThreshold,
EvalWindow: Duration(5 * time.Minute),
Frequency: Duration(1 * time.Minute),

View File

@@ -1,11 +1,16 @@
package telemetry
func EnabledPaths() map[string]struct{} {
enabledPaths := map[string]struct{}{
"/api/v1/channels": {},
func IgnoredPaths() map[string]struct{} {
ignoredPaths := map[string]struct{}{
"/api/v1/tags": {},
"/api/v1/version": {},
"/api/v1/query_range": {},
"/api/v2/metrics/query_range": {},
"/api/v1/health": {},
"/api/v1/featureFlags": {},
}
return enabledPaths
return ignoredPaths
}
func ignoreEvents(event string, attributes map[string]interface{}) bool {

View File

@@ -38,7 +38,6 @@ const (
TELEMETRY_EVENT_LOGS_FILTERS = "Logs Filters"
TELEMETRY_EVENT_DISTRIBUTED = "Distributed"
TELEMETRY_EVENT_QUERY_RANGE_V3 = "Query Range V3 Metadata"
TELEMETRY_EVENT_DASHBOARDS_ALERTS = "Dashboards/Alerts Info"
TELEMETRY_EVENT_ACTIVE_USER = "Active User"
TELEMETRY_EVENT_ACTIVE_USER_PH = "Active User V2"
TELEMETRY_EVENT_USER_INVITATION_SENT = "User Invitation Sent"
@@ -54,7 +53,6 @@ var SAAS_EVENTS_LIST = map[string]struct{}{
TELEMETRY_EVENT_ENVIRONMENT: {},
TELEMETRY_EVENT_USER_INVITATION_SENT: {},
TELEMETRY_EVENT_USER_INVITATION_ACCEPTED: {},
TELEMETRY_EVENT_DASHBOARDS_ALERTS: {},
}
const api_key = "4Gmoa4ixJAUHx2BpJxsjwA1bEfnwEeRz"
@@ -63,9 +61,9 @@ const ph_api_key = "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w"
const IP_NOT_FOUND_PLACEHOLDER = "NA"
const DEFAULT_NUMBER_OF_SERVICES = 6
const HEART_BEAT_DURATION = 12 * time.Hour
const HEART_BEAT_DURATION = 6 * time.Hour
const ACTIVE_USER_DURATION = 6 * time.Hour
const ACTIVE_USER_DURATION = 30 * time.Minute
// const HEART_BEAT_DURATION = 30 * time.Second
// const ACTIVE_USER_DURATION = 30 * time.Second
@@ -243,30 +241,9 @@ func createTelemetry() {
}
telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data, "")
alertsInfo, err := telemetry.reader.GetAlertsInfo(context.Background())
if err != nil {
telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, map[string]interface{}{"error": err.Error()}, "")
} else {
dashboardsInfo, err := telemetry.reader.GetDashboardsInfo(context.Background())
if err == nil {
dashboardsAlertsData := map[string]interface{}{
"totalDashboards": dashboardsInfo.TotalDashboards,
"logsBasedPanels": dashboardsInfo.LogsBasedPanels,
"metricBasedPanels": dashboardsInfo.MetricBasedPanels,
"tracesBasedPanels": dashboardsInfo.TracesBasedPanels,
"totalAlerts": alertsInfo.TotalAlerts,
"logsBasedAlerts": alertsInfo.LogsBasedAlerts,
"metricBasedAlerts": alertsInfo.MetricBasedAlerts,
"tracesBasedAlerts": alertsInfo.TracesBasedAlerts,
}
telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, dashboardsAlertsData, "")
} else {
telemetry.SendEvent(TELEMETRY_EVENT_DASHBOARDS_ALERTS, map[string]interface{}{"error": err.Error()}, "")
}
}
getDistributedInfoInLastHeartBeatInterval, _ := telemetry.reader.GetDistributedInfoInLastHeartBeatInterval(context.Background())
telemetry.SendEvent(TELEMETRY_EVENT_DISTRIBUTED, getDistributedInfoInLastHeartBeatInterval, "")
}
}
}()
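
For context, the heartbeat constants above drive a periodic goroutine. A simplified sketch of its shape, inferred from the SendEvent calls and the closing }() in the hunk (the real createTelemetry does more, and this assumes the time and context packages plus a telemetry instance in scope):

go func() {
	ticker := time.NewTicker(HEART_BEAT_DURATION)
	for range ticker.C {
		data := map[string]interface{}{} // usage stats gathered from the reader
		telemetry.SendEvent(TELEMETRY_EVENT_HEART_BEAT, data, "")
		distributedInfo, _ := telemetry.reader.GetDistributedInfoInLastHeartBeatInterval(context.Background())
		telemetry.SendEvent(TELEMETRY_EVENT_DISTRIBUTED, distributedInfo, "")
	}
}()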

View File

@@ -2,7 +2,7 @@ version: "2.4"
x-clickhouse-defaults: &clickhouse-defaults
restart: on-failure
image: clickhouse/clickhouse-server:23.11.1-alpine
image: clickhouse/clickhouse-server:23.7.3-alpine
tty: true
depends_on:
- zookeeper-1
@@ -192,7 +192,7 @@ services:
<<: *db-depend
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.3}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.1}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -205,7 +205,7 @@ services:
# condition: service_healthy
otel-collector:
image: signoz/signoz-otel-collector:0.88.3
image: signoz/signoz-otel-collector:0.88.1
container_name: signoz-otel-collector
command:
[
@@ -245,7 +245,7 @@ services:
condition: service_healthy
otel-collector-metrics:
image: signoz/signoz-otel-collector:0.88.3
image: signoz/signoz-otel-collector:0.88.1
container_name: signoz-otel-collector-metrics
command:
[