Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-08 02:39:55 +00:00)

Compare commits

44 commits: testingtf...feat/multi
| Author | SHA1 | Date |
|---|---|---|
|  | 3fe6aa9fdf |  |
|  | caaab625cc |  |
|  | 8160e1a499 |  |
|  | fcf633b397 |  |
|  | ef36f1e84a |  |
|  | 73f57d8bee |  |
|  | ab17bf3558 |  |
|  | eb5a1b76b8 |  |
|  | 130ff925bd |  |
|  | 75d86cea60 |  |
|  | cf451d335c |  |
|  | e47c7cc17b |  |
|  | 629c54d3f9 |  |
|  | ed3026eeb5 |  |
|  | ccf26883c4 |  |
|  | 958924befe |  |
|  | b70c570cdc |  |
|  | 42a026469b |  |
|  | 6de0908a62 |  |
|  | fd21a4955e |  |
|  | 3dce13d29f |  |
|  | 2ce4b60c55 |  |
|  | c9888804cd |  |
|  | 413b0d9fae |  |
|  | b24095236f |  |
|  | 21d239ce68 |  |
|  | d6e4e3c5ed |  |
|  | 552b103e8b |  |
|  | 1123a9a93d |  |
|  | 8b30e3cc5c |  |
|  | b86e65d2ca |  |
|  | d5e2841083 |  |
|  | 7dad5dcd17 |  |
|  | ac0b640146 |  |
|  | e125d146b5 |  |
|  | a41ffceca4 |  |
|  | 7edb047c0c |  |
|  | 6504f2565b |  |
|  | 6b418a125b |  |
|  | 36827a1667 |  |
|  | 1118c56356 |  |
|  | bd071e3e60 |  |
|  | 36f3a2e26d |  |
|  | fee7e96176 |  |
@@ -1,5 +1,4 @@
services:
clickhouse:
image: clickhouse/clickhouse-server:24.1.2-alpine
container_name: clickhouse

@@ -24,7 +23,6 @@ services:
retries: 3
depends_on:
- zookeeper
zookeeper:
image: bitnami/zookeeper:3.7.1
container_name: zookeeper

@@ -41,9 +39,8 @@ services:
interval: 30s
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:0.111.29
image: signoz/signoz-schema-migrator:v0.111.40
container_name: schema-migrator-sync
command:
- sync

@@ -55,9 +52,8 @@ services:
clickhouse:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:0.111.29
image: signoz/signoz-schema-migrator:v0.111.40
container_name: schema-migrator-async
command:
- async
.github/CODEOWNERS (vendored, 1 line changed)

@@ -11,3 +11,4 @@
/pkg/errors/ @grandwizard28
/pkg/factory/ @grandwizard28
/pkg/types/ @grandwizard28
/pkg/sqlmigration/ @vikrantgupta25
.github/workflows/build-enterprise.yaml (vendored, 1 line changed)

@@ -69,6 +69,7 @@ jobs:
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
echo 'USERPILOT_KEY="${{ secrets.USERPILOT_KEY }}"' >> frontend/.env
- name: cache-dotenv
uses: actions/cache@v4
with:
.github/workflows/build-staging.yaml (vendored, 5 lines changed)

@@ -64,8 +64,9 @@ jobs:
run: |
mkdir -p frontend
echo 'CI=1' > frontend/.env
echo 'TUNNEL_URL=https://telemetry.staging.signoz.cloud/tunnel' >> frontend/.env
echo 'TUNNEL_DOMAIN=https://telemetry.staging.signoz.cloud' >> frontend/.env
echo 'TUNNEL_URL="${{ secrets.NP_TUNNEL_URL }}"' >> frontend/.env
echo 'TUNNEL_DOMAIN="${{ secrets.NP_TUNNEL_DOMAIN }}"' >> frontend/.env
echo 'USERPILOT_KEY="${{ secrets.NP_USERPILOT_KEY }}"' >> frontend/.env
- name: cache-dotenv
uses: actions/cache@v4
with:
.github/workflows/gor-signoz.yaml (vendored, 1 line changed)

@@ -35,6 +35,7 @@ jobs:
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> .env
echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> .env
echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> .env
echo 'USERPILOT_KEY="${{ secrets.USERPILOT_KEY }}"' >> .env
- name: build-frontend
run: make js-build
- name: upload-frontend-artifact
@@ -174,7 +174,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.80.0
image: signoz/signoz:v0.81.0
command:
- --config=/root/config/prometheus.yml
- --use-logs-new-schema=true

@@ -208,7 +208,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.39
image: signoz/signoz-otel-collector:v0.111.40
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml

@@ -232,7 +232,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.39
image: signoz/signoz-schema-migrator:v0.111.40
deploy:
restart_policy:
condition: on-failure
@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.80.0
image: signoz/signoz:v0.81.0
command:
- --config=/root/config/prometheus.yml
- --use-logs-new-schema=true

@@ -143,7 +143,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.39
image: signoz/signoz-otel-collector:v0.111.40
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml

@@ -167,7 +167,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.39
image: signoz/signoz-schema-migrator:v0.111.40
deploy:
restart_policy:
condition: on-failure
@@ -177,7 +177,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.80.0}
image: signoz/signoz:${VERSION:-v0.81.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml

@@ -212,7 +212,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.40}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml

@@ -238,7 +238,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.40}
container_name: schema-migrator-sync
command:
- sync

@@ -249,7 +249,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.40}
container_name: schema-migrator-async
command:
- async
@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.80.0}
image: signoz/signoz:${VERSION:-v0.81.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml

@@ -144,7 +144,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.40}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml

@@ -166,7 +166,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.40}
container_name: schema-migrator-sync
command:
- sync

@@ -178,7 +178,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.40}
container_name: schema-migrator-async
command:
- async
@@ -5,7 +5,7 @@ import (
"math"
"time"
"github.com/SigNoz/signoz/pkg/query-service/cache"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/postprocess"

@@ -17,7 +17,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/cache"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
rules "github.com/SigNoz/signoz/pkg/query-service/rules"

@@ -38,7 +37,6 @@ type APIHandlerOptions struct {
IntegrationsController *integrations.Controller
CloudIntegrationsController *cloudintegrations.Controller
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
Cache cache.Cache
Gateway *httputil.ReverseProxy
GatewayUrl string
// Querier Influx Interval

@@ -64,7 +62,6 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
IntegrationsController: opts.IntegrationsController,
CloudIntegrationsController: opts.CloudIntegrationsController,
LogsParsingPipelineController: opts.LogsParsingPipelineController,
Cache: opts.Cache,
FluxInterval: opts.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
FieldsAPI: fields.NewAPI(signoz.TelemetryStore),
@@ -85,25 +85,25 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
switch seasonality {
case anomaly.SeasonalityWeekly:
provider = anomaly.NewWeeklyProvider(
anomaly.WithCache[*anomaly.WeeklyProvider](aH.opts.Cache),
anomaly.WithCache[*anomaly.WeeklyProvider](aH.Signoz.Cache),
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.WeeklyProvider](aH.opts.DataConnector),
)
case anomaly.SeasonalityDaily:
provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
anomaly.WithCache[*anomaly.DailyProvider](aH.Signoz.Cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
)
case anomaly.SeasonalityHourly:
provider = anomaly.NewHourlyProvider(
anomaly.WithCache[*anomaly.HourlyProvider](aH.opts.Cache),
anomaly.WithCache[*anomaly.HourlyProvider](aH.Signoz.Cache),
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.HourlyProvider](aH.opts.DataConnector),
)
default:
provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
anomaly.WithCache[*anomaly.DailyProvider](aH.Signoz.Cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
)
@@ -19,6 +19,7 @@ import (
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/rules"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/prometheus"
"github.com/SigNoz/signoz/pkg/signoz"

@@ -41,7 +42,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
opAmpModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
"github.com/SigNoz/signoz/pkg/query-service/cache"
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"

@@ -57,7 +57,6 @@ type ServerOptions struct {
HTTPHostPort string
PrivateHostPort string
PreferSpanMetrics bool
CacheConfigPath string
FluxInterval string
FluxIntervalForTraceDetail string
Cluster string

@@ -134,19 +133,10 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
serverOptions.SigNoz.Cache,
)
var c cache.Cache
if serverOptions.CacheConfigPath != "" {
cacheOpts, err := cache.LoadFromYAMLCacheConfigFile(serverOptions.CacheConfigPath)
if err != nil {
return nil, err
}
c = cache.NewCache(cacheOpts)
}
rm, err := makeRulesManager(
serverOptions.SigNoz.SQLStore.SQLxDB(),
reader,
c,
serverOptions.SigNoz.Cache,
serverOptions.SigNoz.Alertmanager,
serverOptions.SigNoz.SQLStore,
serverOptions.SigNoz.TelemetryStore,

@@ -223,7 +213,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
IntegrationsController: integrationsController,
CloudIntegrationsController: cloudIntegrationsController,
LogsParsingPipelineController: logParsingPipelineController,
Cache: c,
FluxInterval: fluxInterval,
Gateway: gatewayProxy,
GatewayUrl: serverOptions.GatewayUrl,
@@ -138,7 +138,6 @@ func main() {
HTTPHostPort: baseconst.HTTPHostPort,
PreferSpanMetrics: preferSpanMetrics,
PrivateHostPort: baseconst.PrivateHostPort,
CacheConfigPath: cacheConfigPath,
FluxInterval: fluxInterval,
FluxIntervalForTraceDetail: fluxIntervalForTraceDetail,
Cluster: cluster,
@@ -12,7 +12,7 @@ import (
"go.uber.org/zap"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/query-service/cache"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"

@@ -127,7 +127,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
)
if err != nil {
zap.L().Error("failed to prepare a new threshold rule for test", zap.String("name", rule.Name()), zap.Error(err))
zap.L().Error("failed to prepare a new threshold rule for test", zap.String("name", alertname), zap.Error(err))
return 0, basemodel.BadRequest(err)
}

@@ -146,7 +146,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
)
if err != nil {
zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", rule.Name()), zap.Error(err))
zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", alertname), zap.Error(err))
return 0, basemodel.BadRequest(err)
}
} else if parsedRule.RuleType == ruletypes.RuleTypeAnomaly {

@@ -161,7 +161,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
baserules.WithSQLStore(opts.SQLStore),
)
if err != nil {
zap.L().Error("failed to prepare a new anomaly rule for test", zap.String("name", rule.Name()), zap.Error(err))
zap.L().Error("failed to prepare a new anomaly rule for test", zap.String("name", alertname), zap.Error(err))
return 0, basemodel.BadRequest(err)
}
} else {
frontend/public/Logos/datadog.svg (new file, 39 lines, 3.8 KiB)

@@ -0,0 +1,39 @@
[SVG vector markup for the Datadog logo]
frontend/public/Logos/grafana.svg (new file, 70 lines, 6.6 KiB)

@@ -0,0 +1,70 @@
[SVG vector markup for the Grafana logo]
frontend/public/Logos/temporal.svg (new file, 1 line, 1.7 KiB)

@@ -0,0 +1 @@
[single-line SVG vector markup for the Temporal logo]
@@ -60,6 +60,8 @@ function App(): JSX.Element {
const { isCloudUser, isEnterpriseSelfHostedUser } = useGetTenantLicense();
const [isSentryInitialized, setIsSentryInitialized] = useState(false);
const enableAnalytics = useCallback(
(user: IUser): void => {
// wait for the required data to be loaded before doing init for anything!

@@ -293,25 +295,29 @@ function App(): JSX.Element {
Userpilot.initialize(process.env.USERPILOT_KEY);
}
Sentry.init({
dsn: process.env.SENTRY_DSN,
tunnel: process.env.TUNNEL_URL,
environment: 'production',
integrations: [
Sentry.browserTracingIntegration(),
Sentry.replayIntegration({
maskAllText: false,
blockAllMedia: false,
}),
],
// Performance Monitoring
tracesSampleRate: 1.0, // Capture 100% of the transactions
// Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
tracePropagationTargets: [],
// Session Replay
replaysSessionSampleRate: 0.1, // This sets the sample rate at 10%. You may want to change it to 100% while in development and then sample at a lower rate in production.
replaysOnErrorSampleRate: 1.0, // If you're not already sampling the entire session, change the sample rate to 100% when sampling sessions where errors occur.
});
if (!isSentryInitialized) {
Sentry.init({
dsn: process.env.SENTRY_DSN,
tunnel: process.env.TUNNEL_URL,
environment: 'production',
integrations: [
Sentry.browserTracingIntegration(),
Sentry.replayIntegration({
maskAllText: false,
blockAllMedia: false,
}),
],
// Performance Monitoring
tracesSampleRate: 1.0, // Capture 100% of the transactions
// Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
tracePropagationTargets: [],
// Session Replay
replaysSessionSampleRate: 0.1, // This sets the sample rate at 10%. You may want to change it to 100% while in development and then sample at a lower rate in production.
replaysOnErrorSampleRate: 1.0, // If you're not already sampling the entire session, change the sample rate to 100% when sampling sessions where errors occur.
});
setIsSentryInitialized(true);
}
} else {
posthog.reset();
Sentry.close();

@@ -320,6 +326,7 @@ function App(): JSX.Element {
window.cioanalytics.reset();
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [isCloudUser, isEnterpriseSelfHostedUser]);
// if the user is in logged in state
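The App.tsx hunks above replace an unconditional `Sentry.init` call with one guarded by an `isSentryInitialized` state flag, so the SDK is initialized only once even when the licence-dependent effect re-runs. The following is a minimal illustrative sketch of that guard pattern, not the component's actual code; the hook name `useSentryOnce` is hypothetical and the option set is trimmed to what appears in the diff.

```typescript
import * as Sentry from '@sentry/react';
import { useEffect, useState } from 'react';

// Hypothetical helper hook: initialize Sentry at most once per mount.
function useSentryOnce(enabled: boolean): void {
	// Tracks whether Sentry.init has already run.
	const [initialized, setInitialized] = useState(false);

	useEffect(() => {
		if (enabled && !initialized) {
			Sentry.init({
				dsn: process.env.SENTRY_DSN,
				tunnel: process.env.TUNNEL_URL,
				environment: 'production',
			});
			setInitialized(true); // later re-runs of the effect skip init
		}
		if (!enabled) {
			Sentry.close(); // mirrors the else branch in the diff
		}
	}, [enabled, initialized]);
}
```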
@@ -54,7 +54,9 @@ export const REACT_QUERY_KEY = {
// API Monitoring Query Keys
GET_DOMAINS_LIST: 'GET_DOMAINS_LIST',
GET_DOMAIN_METRICS_DATA: 'GET_DOMAIN_METRICS_DATA',
GET_ENDPOINTS_LIST_BY_DOMAIN: 'GET_ENDPOINTS_LIST_BY_DOMAIN',
GET_TOP_ERRORS_BY_DOMAIN: 'GET_TOP_ERRORS_BY_DOMAIN',
GET_NESTED_ENDPOINTS_LIST: 'GET_NESTED_ENDPOINTS_LIST',
GET_ENDPOINT_METRICS_DATA: 'GET_ENDPOINT_METRICS_DATA',
GET_ENDPOINT_STATUS_CODE_DATA: 'GET_ENDPOINT_STATUS_CODE_DATA',
frontend/src/container/ApiMonitoring/APIMonitoringUtils.test.tsx (new file, 1595 lines)

File diff suppressed because it is too large.
@@ -1,29 +1,17 @@
import { LoadingOutlined } from '@ant-design/icons';
import { Select, Spin, Table, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { Select } from 'antd';
import { initialQueriesMap } from 'constants/queryBuilder';
import {
EndPointsTableRowData,
formatEndPointsDataForTable,
getEndPointsColumnsConfig,
getEndPointsQueryPayload,
getAllEndpointsWidgetData,
getGroupByFiltersFromGroupByValues,
} from 'container/ApiMonitoring/utils';
import GridCard from 'container/GridCardLayout/GridCard';
import QueryBuilderSearchV2 from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2';
import { useGetAggregateKeys } from 'hooks/queryBuilder/useGetAggregateKeys';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useQueries } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import { GlobalReducer } from 'types/reducer/globalTime';
import ErrorState from './components/ErrorState';
import ExpandedRow from './components/ExpandedRow';
import { VIEW_TYPES, VIEWS } from './constants';
import { SPAN_ATTRIBUTES, VIEWS } from './constants';
function AllEndPoints({
domainName,

@@ -31,13 +19,27 @@
setSelectedView,
groupBy,
setGroupBy,
timeRange,
initialFilters,
setInitialFiltersEndPointStats,
}: {
domainName: string;
setSelectedEndPointName: (name: string) => void;
setSelectedView: (tab: VIEWS) => void;
groupBy: IBuilderQuery['groupBy'];
setGroupBy: (groupBy: IBuilderQuery['groupBy']) => void;
timeRange: {
startTime: number;
endTime: number;
};
initialFilters: IBuilderQuery['filters'];
setInitialFiltersEndPointStats: (filters: IBuilderQuery['filters']) => void;
}): JSX.Element {
const [groupBySearchValue, setGroupBySearchValue] = useState<string>('');
const [allAvailableGroupByOptions, setAllAvailableGroupByOptions] = useState<{
[key: string]: any;
}>({});
const {
data: groupByFiltersData,
isLoading: isLoadingGroupByFilters,

@@ -45,7 +47,7 @@
dataSource: DataSource.TRACES,
aggregateAttribute: '',
aggregateOperator: 'noop',
searchText: '',
searchText: groupBySearchValue,
tagType: '',
});

@@ -53,130 +55,144 @@
{ value: string; label: string }[]
>([]);
const [expandedRowKeys, setExpandedRowKeys] = useState<React.Key[]>([]);
const handleGroupByChange = useCallback(
(value: IBuilderQuery['groupBy']) => {
const groupBy = [];
const newGroupBy = [];
for (let index = 0; index < value.length; index++) {
const element = (value[index] as unknown) as string;
const key = groupByFiltersData?.payload?.attributeKeys?.find(
(key) => key.key === element,
);
// Check if the key exists in our cached options first
if (allAvailableGroupByOptions[element]) {
newGroupBy.push(allAvailableGroupByOptions[element]);
} else {
// If not found in cache, check the current filtered results
const key = groupByFiltersData?.payload?.attributeKeys?.find(
(key) => key.key === element,
);
if (key) {
groupBy.push(key);
if (key) {
newGroupBy.push(key);
}
}
}
setGroupBy(groupBy);
setGroupBy(newGroupBy);
setGroupBySearchValue('');
},
[groupByFiltersData, setGroupBy],
[groupByFiltersData, setGroupBy, allAvailableGroupByOptions],
);
useEffect(() => {
if (groupByFiltersData?.payload) {
// Update dropdown options
setGroupByOptions(
groupByFiltersData?.payload?.attributeKeys?.map((filter) => ({
value: filter.key,
label: filter.key,
})) || [],
);
// Cache all available options to preserve selected values using functional update
// to avoid dependency on allAvailableGroupByOptions
setAllAvailableGroupByOptions((prevOptions) => {
const newOptions = { ...prevOptions };
groupByFiltersData?.payload?.attributeKeys?.forEach((filter) => {
newOptions[filter.key] = filter;
});
return newOptions;
});
}
}, [groupByFiltersData]);
}, [groupByFiltersData]); // Only depends on groupByFiltersData now
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
const queryPayloads = useMemo(
() =>
getEndPointsQueryPayload(
groupBy,
domainName,
Math.floor(minTime / 1e9),
Math.floor(maxTime / 1e9),
),
[groupBy, domainName, minTime, maxTime],
);
// Since only one query here
const endPointsDataQueries = useQueries(
queryPayloads.map((payload) => ({
queryKey: [
REACT_QUERY_KEY.GET_ENDPOINTS_LIST_BY_DOMAIN,
payload,
ENTITY_VERSION_V4,
groupBy,
],
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
enabled: !!payload,
staleTime: 60 * 1000, // 1 minute stale time : optimize this part
})),
);
const endPointsDataQuery = endPointsDataQueries[0];
const {
data: allEndPointsData,
isLoading,
isRefetching,
isError,
refetch,
} = endPointsDataQuery;
const endPointsColumnsConfig = useMemo(
() => getEndPointsColumnsConfig(groupBy.length > 0, expandedRowKeys),
[groupBy.length, expandedRowKeys],
);
const expandedRowRender = (record: EndPointsTableRowData): JSX.Element => (
<ExpandedRow
domainName={domainName}
selectedRowData={record}
setSelectedEndPointName={setSelectedEndPointName}
setSelectedView={setSelectedView}
/>
);
const handleGroupByRowClick = (record: EndPointsTableRowData): void => {
if (expandedRowKeys.includes(record.key)) {
setExpandedRowKeys(expandedRowKeys.filter((key) => key !== record.key));
} else {
setExpandedRowKeys((expandedRowKeys) => [...expandedRowKeys, record.key]);
// Cache existing selected options on component mount
useEffect(() => {
if (groupBy && groupBy.length > 0) {
setAllAvailableGroupByOptions((prevOptions) => {
const newOptions = { ...prevOptions };
groupBy.forEach((option) => {
newOptions[option.key] = option;
});
return newOptions;
});
}
};
}, [groupBy]); // Removed allAvailableGroupByOptions from dependencies
const handleRowClick = (record: EndPointsTableRowData): void => {
if (groupBy.length === 0) {
setSelectedEndPointName(record.endpointName); // this will open up the endpoint details tab
setSelectedView(VIEW_TYPES.ENDPOINT_DETAILS);
logEvent('API Monitoring: Endpoint name row clicked', {});
} else {
handleGroupByRowClick(record); // this will prepare the nested query payload
}
};
const currentQuery = initialQueriesMap[DataSource.TRACES];
const formattedEndPointsData = useMemo(
() =>
formatEndPointsDataForTable(
allEndPointsData?.payload?.data?.result[0]?.table?.rows,
groupBy,
),
[groupBy, allEndPointsData],
// Local state for filters, combining endpoint filter and search filters
const [filters, setFilters] = useState<IBuilderQuery['filters']>(() => {
// Initialize filters based on the initial endPointName prop
const initialItems = [...initialFilters.items];
return { op: 'AND', items: initialItems };
});
// Handler for changes from the QueryBuilderSearchV2 component
const handleFilterChange = useCallback(
(newFilters: IBuilderQuery['filters']): void => {
// 1. Update local filters state immediately
setFilters(newFilters);
},
[], // Dependencies for the callback
);
if (isError) {
return (
<div className="all-endpoints-error-state-wrapper">
<ErrorState refetch={refetch} />
</div>
);
}
const updatedCurrentQuery = useMemo(
() => ({
...currentQuery,
builder: {
...currentQuery.builder,
queryData: [
{
...currentQuery.builder.queryData[0],
dataSource: DataSource.TRACES,
filters, // Use the local filters state
},
],
},
}),
[filters, currentQuery],
);
const query = updatedCurrentQuery?.builder?.queryData[0] || null;
const allEndpointsWidgetData = useMemo(
() => getAllEndpointsWidgetData(groupBy, domainName, filters),
[groupBy, domainName, filters],
);
const onRowClick = useCallback(
(props: any): void => {
setSelectedEndPointName(props[SPAN_ATTRIBUTES.URL_PATH] as string);
setSelectedView(VIEWS.ENDPOINT_STATS);
const initialItems = [
...filters.items,
...getGroupByFiltersFromGroupByValues(props, groupBy).items,
];
setInitialFiltersEndPointStats({
items: initialItems,
op: 'AND',
});
},
[
filters,
setInitialFiltersEndPointStats,
setSelectedEndPointName,
setSelectedView,
groupBy,
],
);
return (
<div className="all-endpoints-container">
<div className="all-endpoints-header">
<div className="filter-container">
<QueryBuilderSearchV2
query={query}
onChange={handleFilterChange}
placeholder="Search for filters..."
/>
</div>
</div>
<div className="group-by-container">
<div className="group-by-label"> Group by </div>
<Select

@@ -189,49 +205,17 @@ function AllEndPoints({
placeholder="Search for attribute"
options={groupByOptions}
onChange={handleGroupByChange}
onSearch={(value: string): void => setGroupBySearchValue(value)}
/>{' '}
</div>
<div className="endpoints-table-container">
<div className="endpoints-table-header">Endpoint overview</div>
<Table
columns={endPointsColumnsConfig}
loading={{
spinning: isLoading || isRefetching,
indicator: <Spin indicator={<LoadingOutlined size={14} spin />} />,
}}
dataSource={isLoading || isRefetching ? [] : formattedEndPointsData}
locale={{
emptyText:
isLoading || isRefetching ? null : (
<div className="no-filtered-endpoints-message-container">
<div className="no-filtered-endpoints-message-content">
<img
src="/Icons/emptyState.svg"
alt="thinking-emoji"
className="empty-state-svg"
/>
<Typography.Text className="no-filtered-endpoints-message">
This query had no results. Edit your query and try again!
</Typography.Text>
</div>
</div>
),
}}
scroll={{ x: true }}
tableLayout="fixed"
onRow={(record): { onClick: () => void; className: string } => ({
onClick: (): void => handleRowClick(record),
className: 'clickable-row',
})}
expandable={{
expandedRowRender: groupBy.length > 0 ? expandedRowRender : undefined,
expandedRowKeys,
expandIconColumnIndex: -1,
}}
rowClassName={(_, index): string =>
index % 2 === 0 ? 'table-row-dark' : 'table-row-light'
}
<GridCard
widget={allEndpointsWidgetData}
isQueryEnabled
onDragSelect={(): void => {}}
customOnDragSelect={(): void => {}}
customTimeRange={timeRange}
customOnRowClick={onRowClick}
/>
</div>
</div>
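A recurring pattern in the AllEndPoints.tsx hunks above is caching every attribute key ever returned by the group-by search in a lookup map (`allAvailableGroupByOptions`), so that previously selected keys survive later, narrower search results; the cache is written with a functional `setState` update to avoid listing the map itself as an effect dependency. The following is a rough standalone sketch of that idea under simplified names and types (`useGroupByCache`, `AttributeKey`), not the component's actual code.

```typescript
import { useCallback, useEffect, useState } from 'react';

// Hypothetical simplified shape of an attribute key returned by the search API.
interface AttributeKey {
	key: string;
}

function useGroupByCache(searchResults: AttributeKey[] | undefined) {
	// Lookup of every attribute key seen so far, keyed by its name.
	const [cache, setCache] = useState<Record<string, AttributeKey>>({});

	useEffect(() => {
		if (!searchResults) return;
		// Functional update: merge new results without depending on `cache`.
		setCache((prev) => {
			const next = { ...prev };
			searchResults.forEach((attr) => {
				next[attr.key] = attr;
			});
			return next;
		});
	}, [searchResults]);

	// Resolve selected key names, preferring the cache over the current
	// (possibly filtered) search results, and dropping unknown names.
	const resolve = useCallback(
		(names: string[]): AttributeKey[] =>
			names
				.map((name) => cache[name] ?? searchResults?.find((a) => a.key === name))
				.filter((a): a is AttributeKey => Boolean(a)),
		[cache, searchResults],
	);

	return resolve;
}
```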
@@ -12,6 +12,12 @@
background: var(--bg-ink-300);
box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1);
}
.domain-details-drawer-header-right-container {
display: flex;
align-items: center;
gap: 12px;
}
}
.domain-detail-drawer {

@@ -246,6 +252,9 @@
border: 1px solid var(--bg-slate-500);
.endpoints-table-header {
display: flex;
align-items: center;
gap: 10px;
padding: 12px;
color: var(--Vanilla-100, #fff);
font-family: Inter;

@@ -299,6 +308,7 @@
.ant-table-tbody > tr:hover > td {
background: rgba(255, 255, 255, 0.04);
cursor: pointer;
}
.ant-table-cell:first-child {

@@ -386,6 +396,21 @@
padding-top: 20px;
}
.top-errors-dropdown-container {
display: flex;
flex-direction: row;
gap: 10px;
align-items: center;
.endpoint-details-filters-container-dropdown {
width: 100%;
}
.endpoint-details-filters-container-search {
flex: 1;
}
}
.endpoint-details-container {
display: flex;
flex-direction: column;

@@ -690,30 +715,140 @@
border-radius: 3px;
border: 1px solid var(--bg-slate-500);
.top-services-title {
border-bottom: 1px solid var(--bg-slate-500);
padding: 10px 12px;
border-radius: 3px 3px 0px 0px;
background: rgba(171, 189, 255, 0.04);
.title-wrapper {
display: inline-flex;
padding: 1px 2px;
align-items: center;
border-radius: 2px;
background: rgba(113, 144, 249, 0.08);
.title-wrapper {
display: inline-flex;
padding: 1px 2px;
align-items: center;
border-radius: 2px;
background: rgba(113, 144, 249, 0.08);
color: var(--bg-robin-400);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 18px;
letter-spacing: -0.07px;
}
color: var(--bg-robin-400);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 18px;
letter-spacing: -0.07px;
}
.dependent-services-container {
padding: 10px 12px;
border-radius: 3px;
border: 1px solid var(--bg-slate-500);
.ant-table {
.ant-table-thead > tr > th {
padding: 12px;
font-weight: 500;
font-size: 12px;
line-height: 18px;
border-bottom: none;
color: var(--text-vanilla-400);
font-family: Inter;
font-size: 11px;
font-style: normal;
font-weight: 600;
line-height: 18px;
/* 163.636% */
letter-spacing: 0.44px;
text-transform: uppercase;
background: none;
&::before {
background-color: transparent;
}
}
.ant-table-thead > tr > th:has(.status-code-header) {
background: var(--bg-ink-300);
opacity: 0.6;
}
.ant-table-cell {
padding: 12px;
font-size: 13px;
line-height: 20px;
color: var(--bg-vanilla-100);
border-bottom: none;
background: var(--bg-ink-400);
}
.ant-table-cell:has(.col-title) {
background: rgba(171, 189, 255, 0.04);
}
.ant-table-cell:has(.top-services-item-latency) {
text-align: center;
opacity: 0.8;
background: rgba(171, 189, 255, 0.04);
}
.ant-table-cell:has(.top-services-item-latency-title) {
text-align: center;
opacity: 0.8;
background: rgba(171, 189, 255, 0.04);
}
.ant-table-tbody > tr:hover > td {
background: rgba(255, 255, 255, 0.04);
}
.ant-table-cell:first-child {
text-align: justify;
}
.ant-table-cell:nth-child(2) {
padding-left: 16px;
padding-right: 16px;
}
.ant-table-cell:nth-child(n + 3) {
padding-right: 24px;
}
.ant-table-tbody > tr > td {
border-bottom: none;
}
.ant-table-thead
> tr
> th:not(:last-child):not(.ant-table-selection-column):not(.ant-table-row-expand-icon-cell):not([colspan])::before {
background-color: transparent;
}
.ant-empty-normal {
visibility: hidden;
}
.table-row-dark {
background: var(--bg-ink-300);
}
.ant-table-content {
margin-bottom: 0px;
}
}
.no-status-code-data-message-container {
height: 30vh;
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
.no-status-code-data-message-content {
display: flex;
flex-direction: column;
align-items: flex-start;
justify-content: center;
width: fit-content;
padding: 24px;
}
.no-status-code-data-message {
margin-top: 8px;
}
}
.top-services-item {
display: flex;
justify-content: space-between;

@@ -743,6 +878,7 @@
.top-services-item-progress-bar {
background-color: var(--bg-slate-400);
border-radius: 2px;
height: 100%;
position: absolute;
top: 0;

@@ -758,7 +894,7 @@
.top-services-load-more {
border-top: 1px solid var(--bg-slate-500);
padding-top: 10px;
padding: 10px;
color: var(--text-vanilla-400);
font-family: Inter;
@@ -3,15 +3,27 @@ import './DomainDetails.styles.scss';
import { Color, Spacing } from '@signozhq/design-tokens';
import { Button, Divider, Drawer, Radio, Typography } from 'antd';
import { RadioChangeEvent } from 'antd/lib';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import {
CustomTimeType,
Time,
} from 'container/TopNav/DateTimeSelectionV2/config';
import { useIsDarkMode } from 'hooks/useDarkMode';
import GetMinMax from 'lib/getMinMax';
import { ArrowDown, ArrowUp, X } from 'lucide-react';
import { useState } from 'react';
import { useCallback, useMemo, useState } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { GlobalReducer } from 'types/reducer/globalTime';
import AllEndPoints from './AllEndPoints';
import DomainMetrics from './components/DomainMetrics';
import { VIEW_TYPES, VIEWS } from './constants';
import EndPointDetailsWrapper from './EndPointDetailsWrapper';
import EndPointDetails from './EndPointDetails';
import TopErrors from './TopErrors';
const TimeRangeOffset = 1000000000;
function DomainDetails({
domainData,

@@ -33,12 +45,58 @@ function DomainDetails({
const [endPointsGroupBy, setEndPointsGroupBy] = useState<
IBuilderQuery['groupBy']
>([]);
const [initialFiltersEndPointStats, setInitialFiltersEndPointStats] = useState<
IBuilderQuery['filters']
>(domainListFilters);
const isDarkMode = useIsDarkMode();
const handleTabChange = (e: RadioChangeEvent): void => {
setSelectedView(e.target.value);
};
const { maxTime, minTime, selectedTime } = useSelector<
AppState,
GlobalReducer
>((state) => state.globalTime);
const startMs = useMemo(() => Math.floor(Number(minTime) / TimeRangeOffset), [
minTime,
]);
const endMs = useMemo(() => Math.floor(Number(maxTime) / TimeRangeOffset), [
maxTime,
]);
const [selectedInterval, setSelectedInterval] = useState<Time>(
selectedTime as Time,
);
const [modalTimeRange, setModalTimeRange] = useState(() => ({
startTime: startMs,
endTime: endMs,
}));
const handleTimeChange = useCallback(
(interval: Time | CustomTimeType, dateTimeRange?: [number, number]): void => {
setSelectedInterval(interval as Time);
if (interval === 'custom' && dateTimeRange) {
setModalTimeRange({
startTime: Math.floor(dateTimeRange[0] / 1000),
endTime: Math.floor(dateTimeRange[1] / 1000),
});
} else {
const { maxTime, minTime } = GetMinMax(interval);
setModalTimeRange({
startTime: Math.floor(minTime / TimeRangeOffset),
endTime: Math.floor(maxTime / TimeRangeOffset),
});
}
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[],
);
return (
<Drawer
width="60%"

@@ -50,32 +108,44 @@ function DomainDetails({
{domainData.domainName}
</Typography.Text>
</div>
<Button.Group className="domain-details-drawer-header-ctas">
<Button
className="domain-navigate-cta"
onClick={(): void => {
setSelectedDomainIndex(selectedDomainIndex - 1);
setSelectedEndPointName('');
setEndPointsGroupBy([]);
setSelectedView(VIEW_TYPES.ALL_ENDPOINTS);
}}
icon={<ArrowUp size={16} />}
disabled={selectedDomainIndex === 0}
title="Previous domain"
<div className="domain-details-drawer-header-right-container">
<DateTimeSelectionV2
showAutoRefresh={false}
showRefreshText={false}
onTimeChange={handleTimeChange}
defaultRelativeTime="5m"
isModalTimeSelection
modalSelectedInterval={selectedInterval}
modalInitialStartTime={modalTimeRange.startTime * 1000}
modalInitialEndTime={modalTimeRange.endTime * 1000}
/>
<Button
className="domain-navigate-cta"
onClick={(): void => {
setSelectedDomainIndex(selectedDomainIndex + 1);
setSelectedEndPointName('');
setEndPointsGroupBy([]);
setSelectedView(VIEW_TYPES.ALL_ENDPOINTS);
}}
icon={<ArrowDown size={16} />}
disabled={selectedDomainIndex === domainListLength - 1}
title="Next domain"
/>
</Button.Group>
<Button.Group className="domain-details-drawer-header-ctas">
<Button
className="domain-navigate-cta"
onClick={(): void => {
setSelectedDomainIndex(selectedDomainIndex - 1);
setSelectedEndPointName('');
setEndPointsGroupBy([]);
setSelectedView(VIEW_TYPES.ALL_ENDPOINTS);
}}
icon={<ArrowUp size={16} />}
disabled={selectedDomainIndex === 0}
title="Previous domain"
/>
<Button
className="domain-navigate-cta"
onClick={(): void => {
setSelectedDomainIndex(selectedDomainIndex + 1);
setSelectedEndPointName('');
setEndPointsGroupBy([]);
setSelectedView(VIEW_TYPES.ALL_ENDPOINTS);
}}
icon={<ArrowDown size={16} />}
disabled={selectedDomainIndex === domainListLength - 1}
title="Next domain"
/>
</Button.Group>
</div>
</div>
}
placement="right"

@@ -91,7 +161,11 @@ function DomainDetails({
>
{domainData && (
<>
<DomainMetrics domainData={domainData} />
<DomainMetrics
domainName={domainData.domainName}
domainListFilters={domainListFilters}
timeRange={modalTimeRange}
/>
<div className="views-tabs-container">
<Radio.Group
className="views-tabs"

@@ -109,13 +183,21 @@ function DomainDetails({
</Radio.Button>
<Radio.Button
className={
selectedView === VIEW_TYPES.ENDPOINT_DETAILS
selectedView === VIEW_TYPES.ENDPOINT_STATS
? 'tab selected_view'
: 'tab'
}
value={VIEW_TYPES.ENDPOINT_DETAILS}
value={VIEW_TYPES.ENDPOINT_STATS}
>
<div className="view-title">Endpoint Details</div>
<div className="view-title">Endpoint(s) Stats</div>
</Radio.Button>
<Radio.Button
className={
selectedView === VIEW_TYPES.TOP_ERRORS ? 'tab selected_view' : 'tab'
}
value={VIEW_TYPES.TOP_ERRORS}
>
<div className="view-title">Top 10 Errors</div>
</Radio.Button>
</Radio.Group>
</div>

@@ -126,15 +208,28 @@ function DomainDetails({
setSelectedView={setSelectedView}
groupBy={endPointsGroupBy}
setGroupBy={setEndPointsGroupBy}
timeRange={modalTimeRange}
initialFilters={domainListFilters}
setInitialFiltersEndPointStats={setInitialFiltersEndPointStats}
/>
)}
{selectedView === VIEW_TYPES.ENDPOINT_DETAILS && (
<EndPointDetailsWrapper
{selectedView === VIEW_TYPES.ENDPOINT_STATS && (
<EndPointDetails
domainName={domainData.domainName}
endPointName={selectedEndPointName}
setSelectedEndPointName={setSelectedEndPointName}
domainListFilters={domainListFilters}
initialFilters={initialFiltersEndPointStats}
timeRange={modalTimeRange}
handleTimeChange={handleTimeChange}
/>
)}
{selectedView === VIEW_TYPES.TOP_ERRORS && (
<TopErrors
domainName={domainData.domainName}
timeRange={modalTimeRange}
initialFilters={domainListFilters}
/>
)}
</>
@@ -8,16 +8,18 @@ import {
|
||||
getRateOverTimeWidgetData,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import QueryBuilderSearchV2 from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2';
|
||||
import {
|
||||
CustomTimeType,
|
||||
Time,
|
||||
} from 'container/TopNav/DateTimeSelectionV2/config';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { useMemo, useState } from 'react';
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { useQueries } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import DependentServices from './components/DependentServices';
|
||||
import EndPointMetrics from './components/EndPointMetrics';
|
||||
@@ -25,33 +27,107 @@ import EndPointsDropDown from './components/EndPointsDropDown';
|
||||
import MetricOverTimeGraph from './components/MetricOverTimeGraph';
|
||||
import StatusCodeBarCharts from './components/StatusCodeBarCharts';
|
||||
import StatusCodeTable from './components/StatusCodeTable';
|
||||
import { SPAN_ATTRIBUTES } from './constants';
|
||||
|
||||
const httpUrlKey = {
|
||||
dataType: DataTypes.String,
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
type: 'tag',
|
||||
};
|
||||
|
||||
function EndPointDetails({
|
||||
domainName,
|
||||
endPointName,
|
||||
setSelectedEndPointName,
|
||||
domainListFilters,
|
||||
initialFilters,
|
||||
timeRange,
|
||||
handleTimeChange,
|
||||
}: {
|
||||
domainName: string;
|
||||
endPointName: string;
|
||||
setSelectedEndPointName: (value: string) => void;
|
||||
domainListFilters: IBuilderQuery['filters'];
|
||||
initialFilters: IBuilderQuery['filters'];
|
||||
timeRange: {
|
||||
startTime: number;
|
||||
endTime: number;
|
||||
};
|
||||
handleTimeChange: (
|
||||
interval: Time | CustomTimeType,
|
||||
dateTimeRange?: [number, number],
|
||||
) => void;
|
||||
}): JSX.Element {
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
const { startTime: minTime, endTime: maxTime } = timeRange;
|
||||
|
||||
const currentQuery = initialQueriesMap[DataSource.TRACES];
|
||||
|
||||
const [filters, setFilters] = useState<IBuilderQuery['filters']>({
|
||||
op: 'AND',
|
||||
items: [],
|
||||
// Local state for filters, combining endpoint filter and search filters
|
||||
const [filters, setFilters] = useState<IBuilderQuery['filters']>(() => {
|
||||
// Initialize filters based on the initial endPointName prop
|
||||
const initialItems = [...initialFilters.items];
|
||||
if (endPointName) {
|
||||
initialItems.push({
|
||||
id: '92b8a1c1',
|
||||
key: httpUrlKey,
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
});
|
||||
}
|
||||
return { op: 'AND', items: initialItems };
|
||||
});
|
||||
|
||||
// Manually update the query to include the filters
|
||||
// Because using the hook is causing the global domain
|
||||
// query to be updated and causing main domain list to
|
||||
// refetch with the filters of endpoints
|
||||
// Effect to synchronize local filters when the endPointName prop changes (e.g., from dropdown)
|
||||
useEffect(() => {
|
||||
setFilters((currentFilters) => {
|
||||
const existingHttpUrlFilter = currentFilters.items.find(
|
||||
(item) => item.key?.key === httpUrlKey.key,
|
||||
);
|
||||
const existingHttpUrlValue = (existingHttpUrlFilter?.value as string) || '';
|
||||
|
||||
// Only update filters if the prop value is different from what's already in filters
|
||||
if (endPointName === existingHttpUrlValue) {
|
||||
return currentFilters; // No change needed, prevents loop
|
||||
}
|
||||
|
||||
// Rebuild filters: Keep non-http.url filters and add/update http.url filter based on prop
|
||||
const otherFilters = currentFilters.items.filter(
|
||||
(item) => item.key?.key !== httpUrlKey.key,
|
||||
);
|
||||
const newItems = [...otherFilters];
|
||||
if (endPointName) {
|
||||
newItems.push({
|
||||
id: '92b8a1c1',
|
||||
key: httpUrlKey,
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
});
|
||||
}
|
||||
return { op: 'AND', items: newItems };
|
||||
});
|
||||
}, [endPointName]);
|
||||
|
||||
// Handler for changes from the QueryBuilderSearchV2 component
|
||||
const handleFilterChange = useCallback(
|
||||
(newFilters: IBuilderQuery['filters']): void => {
|
||||
// 1. Update local filters state immediately
|
||||
setFilters(newFilters);
|
||||
|
||||
// 2. Derive the endpoint name from the *new* filters state
|
||||
const httpUrlFilter = newFilters.items.find(
|
||||
(item) => item.key?.key === httpUrlKey.key,
|
||||
);
|
||||
const derivedEndPointName = (httpUrlFilter?.value as string) || '';
|
||||
|
||||
// 3. If the derived endpoint name is different from the current prop,
|
||||
// it means the search change modified the effective endpoint.
|
||||
// Notify the parent component.
|
||||
if (derivedEndPointName !== endPointName) {
|
||||
setSelectedEndPointName(derivedEndPointName);
|
||||
}
|
||||
},
|
||||
[endPointName, setSelectedEndPointName], // Dependencies for the callback
|
||||
);
|
||||
|
||||
const updatedCurrentQuery = useMemo(
|
||||
() => ({
|
||||
@@ -62,7 +138,7 @@ function EndPointDetails({
|
||||
{
|
||||
...currentQuery.builder.queryData[0],
|
||||
dataSource: DataSource.TRACES,
|
||||
filters,
|
||||
filters, // Use the local filters state
|
||||
},
|
||||
],
|
||||
},
|
||||
@@ -78,15 +154,8 @@ function EndPointDetails({
|
||||
);
|
||||
|
||||
const endPointDetailsQueryPayload = useMemo(
|
||||
() =>
|
||||
getEndPointDetailsQueryPayload(
|
||||
domainName,
|
||||
endPointName,
|
||||
Math.floor(minTime / 1e9),
|
||||
Math.floor(maxTime / 1e9),
|
||||
filters,
|
||||
),
|
||||
[domainName, endPointName, filters, minTime, maxTime],
|
||||
() => getEndPointDetailsQueryPayload(domainName, minTime, maxTime, filters),
|
||||
[domainName, filters, minTime, maxTime],
|
||||
);
|
||||
|
||||
const endPointDetailsDataQueries = useQueries(
|
||||
@@ -94,7 +163,7 @@ function EndPointDetails({
|
||||
queryKey: [
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY[index],
|
||||
payload,
|
||||
filters.items,
|
||||
filters.items, // Include filters.items in queryKey for better caching
|
||||
ENTITY_VERSION_V4,
|
||||
],
|
||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||
@@ -123,22 +192,30 @@ function EndPointDetails({
|
||||
);
|
||||
|
||||
const { endpoint, port } = useMemo(
|
||||
() => extractPortAndEndpoint(endPointName),
|
||||
() => extractPortAndEndpoint(endPointName), // Derive display info from the prop
|
||||
[endPointName],
|
||||
);
|
||||
|
||||
const [rateOverTimeWidget, latencyOverTimeWidget] = useMemo(
|
||||
() => [
|
||||
getRateOverTimeWidgetData(domainName, endPointName, {
|
||||
items: [...domainListFilters.items, ...filters.items],
|
||||
op: filters.op,
|
||||
}),
|
||||
getLatencyOverTimeWidgetData(domainName, endPointName, {
|
||||
items: [...domainListFilters.items, ...filters.items],
|
||||
op: filters.op,
|
||||
}),
|
||||
getRateOverTimeWidgetData(domainName, endPointName, filters),
|
||||
getLatencyOverTimeWidgetData(domainName, endPointName, filters),
|
||||
],
|
||||
[domainName, endPointName, filters, domainListFilters],
|
||||
[domainName, endPointName, filters], // Use combinedFilters
|
||||
);
|
||||
|
||||
// // [TODO] Fix this later
|
||||
const onDragSelect = useCallback(
|
||||
(start: number, end: number) => {
|
||||
const startTimestamp = Math.trunc(start);
|
||||
const endTimestamp = Math.trunc(end);
|
||||
|
||||
if (startTimestamp !== endTimestamp) {
|
||||
// update the value in local time picker
|
||||
handleTimeChange('custom', [startTimestamp, endTimestamp]);
|
||||
}
|
||||
},
|
||||
[handleTimeChange],
|
||||
);
|
||||
|
||||
return (
|
||||
@@ -156,9 +233,7 @@ function EndPointDetails({
|
||||
<div className="endpoint-details-filters-container-search">
|
||||
<QueryBuilderSearchV2
|
||||
query={query}
|
||||
onChange={(searchFilters): void => {
|
||||
setFilters(searchFilters);
|
||||
}}
|
||||
onChange={handleFilterChange}
|
||||
placeholder="Search for filters..."
|
||||
/>
|
||||
</div>
|
||||
@@ -166,7 +241,9 @@ function EndPointDetails({
|
||||
<div className="endpoint-meta-data">
|
||||
<div className="endpoint-meta-data-pill">
|
||||
<div className="endpoint-meta-data-label">Endpoint</div>
|
||||
<div className="endpoint-meta-data-value">{endpoint || '-'}</div>
|
||||
<div className="endpoint-meta-data-value">
|
||||
{endpoint || 'All Endpoints'}
|
||||
</div>
|
||||
</div>
|
||||
<div className="endpoint-meta-data-pill">
|
||||
<div className="endpoint-meta-data-label">Port</div>
|
||||
@@ -177,6 +254,7 @@ function EndPointDetails({
|
||||
{!isServicesFilterApplied && (
|
||||
<DependentServices
|
||||
dependentServicesQuery={endPointDependentServicesDataQuery}
|
||||
timeRange={timeRange}
|
||||
/>
|
||||
)}
|
||||
<StatusCodeBarCharts
|
||||
@@ -186,12 +264,21 @@ function EndPointDetails({
|
||||
}
|
||||
domainName={domainName}
|
||||
endPointName={endPointName}
|
||||
domainListFilters={domainListFilters}
|
||||
filters={filters}
|
||||
timeRange={timeRange}
|
||||
onDragSelect={onDragSelect}
|
||||
/>
|
||||
<StatusCodeTable endPointStatusCodeDataQuery={endPointStatusCodeDataQuery} />
|
||||
<MetricOverTimeGraph widget={rateOverTimeWidget} />
|
||||
<MetricOverTimeGraph widget={latencyOverTimeWidget} />
|
||||
<MetricOverTimeGraph
|
||||
widget={rateOverTimeWidget}
|
||||
timeRange={timeRange}
|
||||
onDragSelect={onDragSelect}
|
||||
/>
|
||||
<MetricOverTimeGraph
|
||||
widget={latencyOverTimeWidget}
|
||||
timeRange={timeRange}
|
||||
onDragSelect={onDragSelect}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,80 +0,0 @@
|
||||
import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { getEndPointZeroStateQueryPayload } from 'container/ApiMonitoring/utils';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { useMemo } from 'react';
|
||||
import { useQueries } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import EndPointDetailsZeroState from './components/EndPointDetailsZeroState';
|
||||
import EndPointDetails from './EndPointDetails';
|
||||
|
||||
function EndPointDetailsWrapper({
|
||||
domainName,
|
||||
endPointName,
|
||||
setSelectedEndPointName,
|
||||
domainListFilters,
|
||||
}: {
|
||||
domainName: string;
|
||||
endPointName: string;
|
||||
setSelectedEndPointName: (value: string) => void;
|
||||
domainListFilters: IBuilderQuery['filters'];
|
||||
}): JSX.Element {
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const endPointZeroStateQueryPayload = useMemo(
|
||||
() =>
|
||||
getEndPointZeroStateQueryPayload(
|
||||
domainName,
|
||||
Math.floor(minTime / 1e9),
|
||||
Math.floor(maxTime / 1e9),
|
||||
),
|
||||
[domainName, minTime, maxTime],
|
||||
);
|
||||
|
||||
const endPointZeroStateDataQueries = useQueries(
|
||||
endPointZeroStateQueryPayload.map((payload) => ({
|
||||
queryKey: [
|
||||
// Since only one query here
|
||||
REACT_QUERY_KEY.GET_ENDPOINT_DROPDOWN_DATA,
|
||||
payload,
|
||||
ENTITY_VERSION_V4,
|
||||
],
|
||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
|
||||
enabled: !!payload,
|
||||
})),
|
||||
);
|
||||
|
||||
const [endPointZeroStateDataQuery] = useMemo(
|
||||
() => [endPointZeroStateDataQueries[0]],
|
||||
[endPointZeroStateDataQueries],
|
||||
);
|
||||
|
||||
if (endPointName === '') {
|
||||
return (
|
||||
<EndPointDetailsZeroState
|
||||
setSelectedEndPointName={setSelectedEndPointName}
|
||||
endPointDropDownDataQuery={endPointZeroStateDataQuery}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<EndPointDetails
|
||||
domainName={domainName}
|
||||
endPointName={endPointName}
|
||||
setSelectedEndPointName={setSelectedEndPointName}
|
||||
domainListFilters={domainListFilters}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
export default EndPointDetailsWrapper;
|
||||
@@ -0,0 +1,251 @@
|
||||
import { LoadingOutlined } from '@ant-design/icons';
|
||||
import { Spin, Switch, Table, Tooltip, Typography } from 'antd';
|
||||
import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
|
||||
import { DEFAULT_ENTITY_VERSION, ENTITY_VERSION_V4 } from 'constants/app';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import {
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY,
|
||||
formatTopErrorsDataForTable,
|
||||
getEndPointDetailsQueryPayload,
|
||||
getTopErrorsColumnsConfig,
|
||||
getTopErrorsCoRelationQueryFilters,
|
||||
getTopErrorsQueryPayload,
|
||||
TopErrorsResponseRow,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { Info } from 'lucide-react';
|
||||
import { useMemo, useState } from 'react';
|
||||
import { useQueries } from 'react-query';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
import EndPointsDropDown from './components/EndPointsDropDown';
|
||||
import ErrorState from './components/ErrorState';
|
||||
import { SPAN_ATTRIBUTES } from './constants';
|
||||
|
||||
function TopErrors({
|
||||
domainName,
|
||||
timeRange,
|
||||
initialFilters,
|
||||
}: {
|
||||
domainName: string;
|
||||
timeRange: {
|
||||
startTime: number;
|
||||
endTime: number;
|
||||
};
|
||||
initialFilters: IBuilderQuery['filters'];
|
||||
}): JSX.Element {
|
||||
const { startTime: minTime, endTime: maxTime } = timeRange;
|
||||
|
||||
const [endPointName, setSelectedEndPointName] = useState<string>('');
|
||||
const [showStatusCodeErrors, setShowStatusCodeErrors] = useState<boolean>(
|
||||
true,
|
||||
);
|
||||
|
||||
const queryPayloads = useMemo(
|
||||
() =>
|
||||
getTopErrorsQueryPayload(
|
||||
domainName,
|
||||
minTime,
|
||||
maxTime,
|
||||
{
|
||||
items: endPointName
|
||||
? [
|
||||
{
|
||||
id: '92b8a1c1',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
},
|
||||
...initialFilters.items,
|
||||
]
|
||||
: [...initialFilters.items],
|
||||
op: 'AND',
|
||||
},
|
||||
showStatusCodeErrors,
|
||||
),
|
||||
[
|
||||
domainName,
|
||||
endPointName,
|
||||
minTime,
|
||||
maxTime,
|
||||
initialFilters,
|
||||
showStatusCodeErrors,
|
||||
],
|
||||
);
|
||||
|
||||
const topErrorsDataQueries = useQueries(
|
||||
queryPayloads.map((payload) => ({
|
||||
queryKey: [
|
||||
REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
|
||||
payload,
|
||||
DEFAULT_ENTITY_VERSION,
|
||||
showStatusCodeErrors,
|
||||
],
|
||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||
GetMetricQueryRange(payload, DEFAULT_ENTITY_VERSION),
|
||||
enabled: !!payload,
|
||||
staleTime: 0,
|
||||
cacheTime: 0,
|
||||
})),
|
||||
);
|
||||
|
||||
const topErrorsDataQuery = topErrorsDataQueries[0];
|
||||
const {
|
||||
data: topErrorsData,
|
||||
isLoading,
|
||||
isRefetching,
|
||||
isError,
|
||||
refetch,
|
||||
} = topErrorsDataQuery;
|
||||
|
||||
const topErrorsColumnsConfig = useMemo(() => getTopErrorsColumnsConfig(), []);
|
||||
|
||||
const formattedTopErrorsData = useMemo(
|
||||
() =>
|
||||
formatTopErrorsDataForTable(
|
||||
topErrorsData?.payload?.data?.result as TopErrorsResponseRow[],
|
||||
),
|
||||
[topErrorsData],
|
||||
);
|
||||
|
||||
const endPointDropDownQueryPayload = useMemo(
|
||||
() => [
|
||||
getEndPointDetailsQueryPayload(domainName, minTime, maxTime, {
|
||||
items: [],
|
||||
op: 'AND',
|
||||
})[2],
|
||||
],
|
||||
[domainName, minTime, maxTime],
|
||||
);
|
||||
|
||||
const endPointDropDownDataQueries = useQueries(
|
||||
endPointDropDownQueryPayload.map((payload) => ({
|
||||
queryKey: [
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY[4],
|
||||
payload,
|
||||
ENTITY_VERSION_V4,
|
||||
],
|
||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
|
||||
enabled: !!payload,
|
||||
staleTime: 60 * 1000,
|
||||
})),
|
||||
);
|
||||
|
||||
const [endPointDropDownDataQuery] = useMemo(
|
||||
() => [endPointDropDownDataQueries[0]],
|
||||
[endPointDropDownDataQueries],
|
||||
);
|
||||
|
||||
const navigateToExplorer = useNavigateToExplorer();
|
||||
|
||||
if (isError) {
|
||||
return (
|
||||
<div className="all-endpoints-error-state-wrapper">
|
||||
<ErrorState refetch={refetch} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="all-endpoints-container">
|
||||
<div className="top-errors-dropdown-container">
|
||||
<div className="endpoint-details-filters-container-dropdown">
|
||||
<EndPointsDropDown
|
||||
selectedEndPointName={endPointName}
|
||||
setSelectedEndPointName={setSelectedEndPointName}
|
||||
endPointDropDownDataQuery={endPointDropDownDataQuery}
|
||||
parentContainerDiv=".endpoint-details-filters-container"
|
||||
/>
|
||||
</div>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: '8px' }}>
|
||||
<Switch
|
||||
checked={showStatusCodeErrors}
|
||||
onChange={setShowStatusCodeErrors}
|
||||
size="small"
|
||||
/>
|
||||
<span style={{ color: 'white', fontSize: '14px' }}>
|
||||
Status Message Exists
|
||||
</span>
|
||||
<Tooltip title="When enabled, shows errors that have a status message. When disabled, shows all errors regardless of status message">
|
||||
<Info size={16} color="white" />
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="endpoints-table-container">
|
||||
<div className="endpoints-table-header">
|
||||
{showStatusCodeErrors ? 'Errors with Status Message' : 'All Errors'}{' '}
|
||||
<Tooltip
|
||||
title={
|
||||
showStatusCodeErrors
|
||||
? 'Shows errors that have a status message'
|
||||
: 'Shows all errors regardless of status message'
|
||||
}
|
||||
>
|
||||
<Info size={16} color="white" />
|
||||
</Tooltip>
|
||||
</div>
|
||||
<Table
|
||||
columns={topErrorsColumnsConfig}
|
||||
loading={{
|
||||
spinning: isLoading || isRefetching,
|
||||
indicator: <Spin indicator={<LoadingOutlined size={14} spin />} />,
|
||||
}}
|
||||
dataSource={isLoading || isRefetching ? [] : formattedTopErrorsData}
|
||||
locale={{
|
||||
emptyText:
|
||||
isLoading || isRefetching ? null : (
|
||||
<div className="no-filtered-endpoints-message-container">
|
||||
<div className="no-filtered-endpoints-message-content">
|
||||
<img
|
||||
src="/Icons/emptyState.svg"
|
||||
alt="thinking-emoji"
|
||||
className="empty-state-svg"
|
||||
/>
|
||||
|
||||
<Typography.Text className="no-filtered-endpoints-message">
|
||||
This query had no results. Edit your query and try again!
|
||||
</Typography.Text>
|
||||
</div>
|
||||
</div>
|
||||
),
|
||||
}}
|
||||
scroll={{ x: true }}
|
||||
tableLayout="fixed"
|
||||
rowClassName={(_, index): string =>
|
||||
index % 2 === 0 ? 'table-row-dark' : 'table-row-light'
|
||||
}
|
||||
onRow={(record): { onClick: () => void } => ({
|
||||
onClick: (): void => {
|
||||
const filters = getTopErrorsCoRelationQueryFilters(
|
||||
domainName,
|
||||
record.endpointName,
|
||||
record.statusCode,
|
||||
);
|
||||
navigateToExplorer({
|
||||
filters: [...filters.items],
|
||||
dataSource: DataSource.TRACES,
|
||||
startTime: minTime,
|
||||
endTime: maxTime,
|
||||
shouldResolveQuery: true,
|
||||
});
|
||||
},
|
||||
})}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default TopErrors;
|
||||
@@ -1,6 +1,13 @@
|
||||
import { Typography } from 'antd';
|
||||
import '../DomainDetails.styles.scss';
|
||||
|
||||
import { Table, TablePaginationConfig, Typography } from 'antd';
|
||||
import Skeleton from 'antd/lib/skeleton';
|
||||
import { getFormattedDependentServicesData } from 'container/ApiMonitoring/utils';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import {
|
||||
dependentServicesColumns,
|
||||
DependentServicesData,
|
||||
getFormattedDependentServicesData,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import { UnfoldVertical } from 'lucide-react';
|
||||
import { useMemo, useState } from 'react';
|
||||
import { UseQueryResult } from 'react-query';
|
||||
@@ -10,10 +17,15 @@ import ErrorState from './ErrorState';
|
||||
|
||||
interface DependentServicesProps {
|
||||
dependentServicesQuery: UseQueryResult<SuccessResponse<any>, unknown>;
|
||||
timeRange: {
|
||||
startTime: number;
|
||||
endTime: number;
|
||||
};
|
||||
}
|
||||
|
||||
function DependentServices({
|
||||
dependentServicesQuery,
|
||||
timeRange,
|
||||
}: DependentServicesProps): JSX.Element {
|
||||
const {
|
||||
data,
|
||||
@@ -23,19 +35,25 @@ function DependentServices({
|
||||
isRefetching,
|
||||
} = dependentServicesQuery;
|
||||
|
||||
const [currentRenderCount, setCurrentRenderCount] = useState(0);
|
||||
const [isExpanded, setIsExpanded] = useState<boolean>(false);
|
||||
|
||||
const dependentServicesData = useMemo(() => {
|
||||
const formattedDependentServicesData = getFormattedDependentServicesData(
|
||||
data?.payload?.data?.result[0].table.rows,
|
||||
);
|
||||
setCurrentRenderCount(Math.min(formattedDependentServicesData.length, 5));
|
||||
return formattedDependentServicesData;
|
||||
}, [data]);
|
||||
const handleShowMoreClick = (): void => {
|
||||
setIsExpanded((prev) => !prev);
|
||||
};
|
||||
|
||||
const renderItems = useMemo(
|
||||
() => dependentServicesData.slice(0, currentRenderCount),
|
||||
[currentRenderCount, dependentServicesData],
|
||||
const dependentServicesData = useMemo(
|
||||
(): DependentServicesData[] =>
|
||||
getFormattedDependentServicesData(data?.payload?.data?.result[0].table.rows),
|
||||
[data],
|
||||
);
|
||||
|
||||
const paginationConfig = useMemo(
|
||||
(): TablePaginationConfig => ({
|
||||
pageSize: isExpanded ? dependentServicesData.length : 5,
|
||||
hideOnSinglePage: true,
|
||||
position: ['none', 'none'],
|
||||
}),
|
||||
[isExpanded, dependentServicesData.length],
|
||||
);
|
||||
|
||||
if (isLoading || isRefetching) {
|
||||
@@ -48,56 +66,66 @@ function DependentServices({
|
||||
|
||||
return (
|
||||
<div className="top-services-content">
|
||||
<div className="top-services-title">
|
||||
<span className="title-wrapper">Dependent Services</span>
|
||||
</div>
|
||||
<div className="dependent-services-container">
|
||||
{renderItems.length === 0 ? (
|
||||
<div className="no-dependent-services-message-container">
|
||||
<div className="no-dependent-services-message-content">
|
||||
<img
|
||||
src="/Icons/emptyState.svg"
|
||||
alt="thinking-emoji"
|
||||
className="empty-state-svg"
|
||||
/>
|
||||
<Table
|
||||
loading={isLoading || isRefetching}
|
||||
dataSource={dependentServicesData || []}
|
||||
columns={dependentServicesColumns}
|
||||
rowClassName="table-row-dark"
|
||||
pagination={paginationConfig}
|
||||
locale={{
|
||||
emptyText:
|
||||
isLoading || isRefetching ? null : (
|
||||
<div className="no-status-code-data-message-container">
|
||||
<div className="no-status-code-data-message-content">
|
||||
<img
|
||||
src="/Icons/emptyState.svg"
|
||||
alt="thinking-emoji"
|
||||
className="empty-state-svg"
|
||||
/>
|
||||
|
||||
<Typography.Text className="no-dependent-services-message">
|
||||
This query had no results. Edit your query and try again!
|
||||
</Typography.Text>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
renderItems.map((item) => (
|
||||
<div className="top-services-item" key={item.key}>
|
||||
<div className="top-services-item-progress">
|
||||
<div className="top-services-item-key">{item.serviceName}</div>
|
||||
<div className="top-services-item-count">{item.count}</div>
|
||||
<div
|
||||
className="top-services-item-progress-bar"
|
||||
style={{ width: `${item.percentage}%` }}
|
||||
/>
|
||||
</div>
|
||||
<div className="top-services-item-percentage">
|
||||
{item.percentage.toFixed(2)}%
|
||||
</div>
|
||||
</div>
|
||||
))
|
||||
)}
|
||||
<Typography.Text className="no-status-code-data-message">
|
||||
This query had no results. Edit your query and try again!
|
||||
</Typography.Text>
|
||||
</div>
|
||||
</div>
|
||||
),
|
||||
}}
|
||||
onRow={(record): { onClick: () => void; className: string } => ({
|
||||
onClick: (): void => {
|
||||
const url = new URL(
|
||||
`/services/${
|
||||
record.serviceData.serviceName &&
|
||||
record.serviceData.serviceName !== '-'
|
||||
? record.serviceData.serviceName
|
||||
: ''
|
||||
}`,
|
||||
window.location.origin,
|
||||
);
|
||||
const urlQuery = new URLSearchParams();
|
||||
urlQuery.set(QueryParams.startTime, timeRange.startTime.toString());
|
||||
urlQuery.set(QueryParams.endTime, timeRange.endTime.toString());
|
||||
url.search = urlQuery.toString();
|
||||
window.open(url.toString(), '_blank');
|
||||
},
|
||||
className: 'clickable-row',
|
||||
})}
|
||||
/>
|
||||
|
||||
{currentRenderCount < dependentServicesData.length && (
|
||||
{dependentServicesData.length > 5 && (
|
||||
<div
|
||||
className="top-services-load-more"
|
||||
onClick={(): void => setCurrentRenderCount(dependentServicesData.length)}
|
||||
onClick={handleShowMoreClick}
|
||||
onKeyDown={(e): void => {
|
||||
if (e.key === 'Enter') {
|
||||
setCurrentRenderCount(dependentServicesData.length);
|
||||
handleShowMoreClick();
|
||||
}
|
||||
}}
|
||||
role="button"
|
||||
tabIndex={0}
|
||||
>
|
||||
<UnfoldVertical size={14} />
|
||||
Show more...
|
||||
{isExpanded ? 'Show less...' : 'Show more...'}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -1,8 +1,88 @@
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Progress, Tooltip, Typography } from 'antd';
|
||||
import { getLastUsedRelativeTime } from 'container/ApiMonitoring/utils';
|
||||
import { Progress, Skeleton, Tooltip, Typography } from 'antd';
|
||||
import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import {
|
||||
DomainMetricsResponseRow,
|
||||
formatDomainMetricsDataForTable,
|
||||
getDomainMetricsQueryPayload,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { useMemo } from 'react';
|
||||
import { useQueries } from 'react-query';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import ErrorState from './ErrorState';
|
||||
|
||||
function DomainMetrics({
|
||||
domainName,
|
||||
timeRange,
|
||||
domainListFilters,
|
||||
}: {
|
||||
domainName: string;
|
||||
timeRange: { startTime: number; endTime: number };
|
||||
domainListFilters: IBuilderQuery['filters'];
|
||||
}): JSX.Element {
|
||||
const { startTime: minTime, endTime: maxTime } = timeRange;
|
||||
|
||||
const queryPayloads = useMemo(
|
||||
() =>
|
||||
getDomainMetricsQueryPayload(
|
||||
domainName,
|
||||
minTime,
|
||||
maxTime,
|
||||
domainListFilters,
|
||||
),
|
||||
[domainName, minTime, maxTime, domainListFilters],
|
||||
);
|
||||
|
||||
// Since only one query here
|
||||
const domainMetricsDataQueries = useQueries(
|
||||
queryPayloads.map((payload) => ({
|
||||
queryKey: [
|
||||
REACT_QUERY_KEY.GET_DOMAIN_METRICS_DATA,
|
||||
payload,
|
||||
ENTITY_VERSION_V4,
|
||||
],
|
||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
|
||||
enabled: !!payload,
|
||||
staleTime: 60 * 1000, // 1 minute stale time : optimize this part
|
||||
})),
|
||||
);
|
||||
|
||||
const domainMetricsDataQuery = domainMetricsDataQueries[0];
|
||||
// [TODO] handle the case where the data is not available
|
||||
// [TODO] Format the data properly
|
||||
const {
|
||||
data: domainMetricsData,
|
||||
isLoading,
|
||||
isRefetching,
|
||||
isError,
|
||||
refetch,
|
||||
} = domainMetricsDataQuery;
|
||||
|
||||
// [TODO] Fix type error
|
||||
const formattedDomainMetricsData = useMemo(() => {
|
||||
// Safely access the data with proper type checking
|
||||
const rowData = domainMetricsData?.payload?.data?.result[0]?.table?.rows[0];
|
||||
|
||||
// Only pass the data if it matches the expected format
|
||||
return formatDomainMetricsDataForTable(
|
||||
rowData as DomainMetricsResponseRow | undefined,
|
||||
);
|
||||
}, [domainMetricsData]);
|
||||
|
||||
if (isError) {
|
||||
return (
|
||||
<div className="all-endpoints-error-state-wrapper">
|
||||
<ErrorState refetch={refetch} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function DomainMetrics({ domainData }: { domainData: any }): JSX.Element {
|
||||
return (
|
||||
<div className="domain-detail-drawer__endpoint">
|
||||
<div className="domain-details-grid">
|
||||
@@ -23,7 +103,7 @@ function DomainMetrics({ domainData }: { domainData: any }): JSX.Element {
|
||||
type="secondary"
|
||||
className="domain-details-metadata-label"
|
||||
>
|
||||
ERROR RATE
|
||||
ERROR %
|
||||
</Typography.Text>
|
||||
<Typography.Text
|
||||
type="secondary"
|
||||
@@ -35,43 +115,62 @@ function DomainMetrics({ domainData }: { domainData: any }): JSX.Element {
|
||||
|
||||
<div className="values-row">
|
||||
<Typography.Text className="domain-details-metadata-value">
|
||||
<Tooltip title={domainData.endpointCount}>
|
||||
<span className="round-metric-tag">{domainData.endpointCount}</span>
|
||||
</Tooltip>
|
||||
{isLoading || isRefetching ? (
|
||||
<Skeleton.Button active size="small" />
|
||||
) : (
|
||||
<Tooltip title={formattedDomainMetricsData.endpointCount}>
|
||||
<span className="round-metric-tag">
|
||||
{formattedDomainMetricsData.endpointCount}
|
||||
</span>
|
||||
</Tooltip>
|
||||
)}
|
||||
</Typography.Text>
|
||||
{/* // update the tooltip as well */}
|
||||
<Typography.Text className="domain-details-metadata-value">
|
||||
<Tooltip title={domainData.latency}>
|
||||
<span className="round-metric-tag">
|
||||
{(domainData.latency / 1000).toFixed(3)}s
|
||||
</span>
|
||||
</Tooltip>
|
||||
{isLoading || isRefetching ? (
|
||||
<Skeleton.Button active size="small" />
|
||||
) : (
|
||||
<Tooltip title={formattedDomainMetricsData.latency}>
|
||||
<span className="round-metric-tag">
|
||||
{(Number(formattedDomainMetricsData.latency) / 1000).toFixed(3)}s
|
||||
</span>
|
||||
</Tooltip>
|
||||
)}
|
||||
</Typography.Text>
|
||||
{/* // update the tooltip as well */}
|
||||
<Typography.Text className="domain-details-metadata-value error-rate">
|
||||
<Tooltip title={domainData.errorRate}>
|
||||
<Progress
|
||||
status="active"
|
||||
percent={Number((domainData.errorRate * 100).toFixed(1))}
|
||||
strokeLinecap="butt"
|
||||
size="small"
|
||||
strokeColor={((): string => {
|
||||
const errorRatePercent = Number(
|
||||
(domainData.errorRate * 100).toFixed(1),
|
||||
);
|
||||
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
||||
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
||||
return Color.BG_FOREST_500;
|
||||
})()}
|
||||
className="progress-bar"
|
||||
/>
|
||||
</Tooltip>
|
||||
{isLoading || isRefetching ? (
|
||||
<Skeleton.Button active size="small" />
|
||||
) : (
|
||||
<Tooltip title={formattedDomainMetricsData.errorRate}>
|
||||
<Progress
|
||||
status="active"
|
||||
percent={Number(
|
||||
Number(formattedDomainMetricsData.errorRate).toFixed(2),
|
||||
)}
|
||||
strokeLinecap="butt"
|
||||
size="small"
|
||||
strokeColor={((): string => {
|
||||
const errorRatePercent = Number(
|
||||
Number(formattedDomainMetricsData.errorRate).toFixed(2),
|
||||
);
|
||||
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
||||
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
||||
return Color.BG_FOREST_500;
|
||||
})()}
|
||||
className="progress-bar"
|
||||
/>
|
||||
</Tooltip>
|
||||
)}
|
||||
</Typography.Text>
|
||||
{/* // update the tooltip as well */}
|
||||
<Typography.Text className="domain-details-metadata-value">
|
||||
<Tooltip title={domainData.lastUsed}>
|
||||
{getLastUsedRelativeTime(domainData.lastUsed)}
|
||||
</Tooltip>
|
||||
{isLoading || isRefetching ? (
|
||||
<Skeleton.Button active size="small" />
|
||||
) : (
|
||||
<Tooltip title={formattedDomainMetricsData.lastUsed}>
|
||||
{formattedDomainMetricsData.lastUsed}
|
||||
</Tooltip>
|
||||
)}
|
||||
</Typography.Text>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -54,7 +54,7 @@ function EndPointMetrics({
|
||||
type="secondary"
|
||||
className="domain-details-metadata-label"
|
||||
>
|
||||
ERROR RATE
|
||||
ERROR %
|
||||
</Typography.Text>
|
||||
<Typography.Text
|
||||
type="secondary"
|
||||
@@ -89,12 +89,13 @@ function EndPointMetrics({
|
||||
) : (
|
||||
<Tooltip title={metricsData?.errorRate}>
|
||||
<Progress
|
||||
percent={Number((metricsData?.errorRate ?? 0 * 100).toFixed(1))}
|
||||
status="active"
|
||||
percent={Number(Number(metricsData?.errorRate ?? 0).toFixed(2))}
|
||||
strokeLinecap="butt"
|
||||
size="small"
|
||||
strokeColor={((): string => {
|
||||
const errorRatePercent = Number(
|
||||
(metricsData?.errorRate ?? 0 * 100).toFixed(1),
|
||||
Number(metricsData?.errorRate ?? 0).toFixed(2),
|
||||
);
|
||||
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
||||
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
||||
|
||||
@@ -52,6 +52,10 @@ function EndPointsDropDown({
|
||||
: (triggerNode): HTMLElement => triggerNode.parentNode as HTMLElement
|
||||
}
|
||||
dropdownStyle={dropdownStyle}
|
||||
allowClear
|
||||
onClear={(): void => {
|
||||
setSelectedEndPointName('');
|
||||
}}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -19,6 +19,7 @@ import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { OrderByPayload } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import { VIEW_TYPES, VIEWS } from '../constants';
|
||||
@@ -28,11 +29,13 @@ function ExpandedRow({
|
||||
selectedRowData,
|
||||
setSelectedEndPointName,
|
||||
setSelectedView,
|
||||
orderBy,
|
||||
}: {
|
||||
domainName: string;
|
||||
selectedRowData: EndPointsTableRowData;
|
||||
setSelectedEndPointName: (name: string) => void;
|
||||
setSelectedView: (view: VIEWS) => void;
|
||||
orderBy: OrderByPayload | null;
|
||||
}): JSX.Element {
|
||||
const nestedColumns = useMemo(() => getEndPointsColumnsConfig(false, []), []);
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
@@ -100,6 +103,7 @@ function ExpandedRow({
|
||||
? formatEndPointsDataForTable(
|
||||
groupedByRowQuery.data?.payload.data.result[0].table?.rows,
|
||||
[],
|
||||
orderBy,
|
||||
)
|
||||
: []
|
||||
}
|
||||
@@ -114,7 +118,7 @@ function ExpandedRow({
|
||||
onRow={(record): { onClick: () => void; className: string } => ({
|
||||
onClick: (): void => {
|
||||
setSelectedEndPointName(record.endpointName);
|
||||
setSelectedView(VIEW_TYPES.ENDPOINT_DETAILS);
|
||||
setSelectedView(VIEW_TYPES.ENDPOINT_STATS);
|
||||
logEvent('API Monitoring: Endpoint name row clicked', {});
|
||||
},
|
||||
className: 'expanded-clickable-row',
|
||||
|
||||
@@ -2,7 +2,15 @@ import { Card } from 'antd';
|
||||
import GridCard from 'container/GridCardLayout/GridCard';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
|
||||
function MetricOverTimeGraph({ widget }: { widget: Widgets }): JSX.Element {
|
||||
function MetricOverTimeGraph({
|
||||
widget,
|
||||
timeRange,
|
||||
onDragSelect,
|
||||
}: {
|
||||
widget: Widgets;
|
||||
timeRange: { startTime: number; endTime: number };
|
||||
onDragSelect: (start: number, end: number) => void;
|
||||
}): JSX.Element {
|
||||
return (
|
||||
<div>
|
||||
<Card bordered className="endpoint-details-card">
|
||||
@@ -10,8 +18,9 @@ function MetricOverTimeGraph({ widget }: { widget: Widgets }): JSX.Element {
|
||||
<GridCard
|
||||
widget={widget}
|
||||
isQueryEnabled
|
||||
onDragSelect={(): void => {}}
|
||||
onDragSelect={onDragSelect}
|
||||
customOnDragSelect={(): void => {}}
|
||||
customTimeRange={timeRange}
|
||||
/>
|
||||
</div>
|
||||
</Card>
|
||||
|
||||
@@ -21,12 +21,9 @@ import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
|
||||
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
|
||||
import { useCallback, useMemo, useRef, useState } from 'react';
|
||||
import { UseQueryResult } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { Options } from 'uplot';
|
||||
|
||||
import ErrorState from './ErrorState';
|
||||
@@ -36,8 +33,9 @@ function StatusCodeBarCharts({
|
||||
endPointStatusCodeLatencyBarChartsDataQuery,
|
||||
domainName,
|
||||
endPointName,
|
||||
domainListFilters,
|
||||
filters,
|
||||
timeRange,
|
||||
onDragSelect,
|
||||
}: {
|
||||
endPointStatusCodeBarChartsDataQuery: UseQueryResult<
|
||||
SuccessResponse<any>,
|
||||
@@ -49,8 +47,12 @@ function StatusCodeBarCharts({
|
||||
>;
|
||||
domainName: string;
|
||||
endPointName: string;
|
||||
domainListFilters: IBuilderQuery['filters'];
|
||||
filters: IBuilderQuery['filters'];
|
||||
timeRange: {
|
||||
startTime: number;
|
||||
endTime: number;
|
||||
};
|
||||
onDragSelect: (start: number, end: number) => void;
|
||||
}): JSX.Element {
|
||||
// 0 : Status Code Count
|
||||
// 1 : Status Code Latency
|
||||
@@ -64,9 +66,7 @@ function StatusCodeBarCharts({
|
||||
data: endPointStatusCodeLatencyBarChartsData,
|
||||
} = endPointStatusCodeLatencyBarChartsDataQuery;
|
||||
|
||||
const { minTime, maxTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
const { startTime: minTime, endTime: maxTime } = timeRange;
|
||||
|
||||
const graphRef = useRef<HTMLDivElement>(null);
|
||||
const dimensions = useResizeObserver(graphRef);
|
||||
@@ -115,25 +115,30 @@ function StatusCodeBarCharts({
|
||||
const navigateToExplorerPages = useNavigateToExplorerPages();
|
||||
const { notifications } = useNotifications();
|
||||
|
||||
const { getCustomSeries } = useGetGraphCustomSeries({
|
||||
isDarkMode,
|
||||
drawStyle: 'bars',
|
||||
colorMapping: {
|
||||
const colorMapping = useMemo(
|
||||
() => ({
|
||||
'200-299': Color.BG_FOREST_500,
|
||||
'300-399': Color.BG_AMBER_400,
|
||||
'400-499': Color.BG_CHERRY_500,
|
||||
'500-599': Color.BG_ROBIN_500,
|
||||
Other: Color.BG_SIENNA_500,
|
||||
},
|
||||
}),
|
||||
[],
|
||||
);
|
||||
|
||||
const { getCustomSeries } = useGetGraphCustomSeries({
|
||||
isDarkMode,
|
||||
drawStyle: 'bars',
|
||||
colorMapping,
|
||||
});
|
||||
|
||||
const widget = useMemo<Widgets>(
|
||||
() =>
|
||||
getStatusCodeBarChartWidgetData(domainName, endPointName, {
|
||||
items: [...domainListFilters.items, ...filters.items],
|
||||
items: [...filters.items],
|
||||
op: filters.op,
|
||||
}),
|
||||
[domainName, endPointName, domainListFilters, filters],
|
||||
[domainName, endPointName, filters],
|
||||
);
|
||||
|
||||
const graphClickHandler = useCallback(
|
||||
@@ -182,11 +187,13 @@ function StatusCodeBarCharts({
|
||||
yAxisUnit: statusCodeWidgetInfo[currentWidgetInfoIndex].yAxisUnit,
|
||||
softMax: null,
|
||||
softMin: null,
|
||||
minTimeScale: Math.floor(minTime / 1e9),
|
||||
maxTimeScale: Math.floor(maxTime / 1e9),
|
||||
minTimeScale: minTime,
|
||||
maxTimeScale: maxTime,
|
||||
panelType: PANEL_TYPES.BAR,
|
||||
onClickHandler: graphClickHandler,
|
||||
customSeries: getCustomSeries,
|
||||
onDragSelect,
|
||||
colorMapping,
|
||||
}),
|
||||
[
|
||||
minTime,
|
||||
@@ -198,6 +205,8 @@ function StatusCodeBarCharts({
|
||||
isDarkMode,
|
||||
graphClickHandler,
|
||||
getCustomSeries,
|
||||
onDragSelect,
|
||||
colorMapping,
|
||||
],
|
||||
);
|
||||
|
||||
|
||||
@@ -1,9 +1,20 @@
|
||||
export enum VIEWS {
|
||||
ALL_ENDPOINTS = 'all_endpoints',
|
||||
ENDPOINT_DETAILS = 'endpoint_details',
|
||||
ENDPOINT_STATS = 'endpoint_stats',
|
||||
TOP_ERRORS = 'top_errors',
|
||||
}
|
||||
|
||||
export const VIEW_TYPES = {
|
||||
ALL_ENDPOINTS: VIEWS.ALL_ENDPOINTS,
|
||||
ENDPOINT_DETAILS: VIEWS.ENDPOINT_DETAILS,
|
||||
ENDPOINT_STATS: VIEWS.ENDPOINT_STATS,
|
||||
TOP_ERRORS: VIEWS.TOP_ERRORS,
|
||||
};
|
||||
|
||||
// Span attribute keys - these are the source of truth for all attribute keys
|
||||
export const SPAN_ATTRIBUTES = {
|
||||
URL_PATH: 'http.url',
|
||||
STATUS_CODE: 'status_code',
|
||||
RESPONSE_STATUS_CODE: 'response_status_code',
|
||||
SERVER_NAME: 'net.peer.name',
|
||||
SERVER_PORT: 'net.peer.port',
|
||||
} as const;
|
||||
|
||||
@@ -7,16 +7,22 @@ import logEvent from 'api/common/logEvent';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import cx from 'classnames';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import RightToolbarActions from 'container/QueryBuilder/components/ToolbarActions/RightToolbarActions';
|
||||
import QueryBuilderSearchV2 from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2';
|
||||
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
|
||||
import { useMemo, useState } from 'react';
|
||||
import Toolbar from 'container/Toolbar/Toolbar';
|
||||
import { useGetCompositeQueryParam } from 'hooks/queryBuilder/useGetCompositeQueryParam';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
|
||||
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
|
||||
import { useCallback, useMemo, useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { HandleChangeQueryData } from 'types/common/operations.types';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import {
|
||||
@@ -26,20 +32,50 @@ import {
|
||||
} from '../../utils';
|
||||
import DomainDetails from './DomainDetails/DomainDetails';
|
||||
|
||||
function DomainList({
|
||||
query,
|
||||
showIP,
|
||||
handleChangeQueryData,
|
||||
}: {
|
||||
query: IBuilderQuery;
|
||||
showIP: boolean;
|
||||
handleChangeQueryData: HandleChangeQueryData;
|
||||
}): JSX.Element {
|
||||
function DomainList({ showIP }: { showIP: boolean }): JSX.Element {
|
||||
const [selectedDomainIndex, setSelectedDomainIndex] = useState<number>(-1);
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const { currentQuery, handleRunQuery } = useQueryBuilder();
|
||||
const query = useMemo(() => currentQuery?.builder?.queryData[0] || null, [
|
||||
currentQuery,
|
||||
]);
|
||||
|
||||
const { handleChangeQueryData } = useQueryOperations({
|
||||
index: 0,
|
||||
query,
|
||||
entityVersion: '',
|
||||
});
|
||||
|
||||
// initialise tab with default query.
|
||||
useShareBuilderUrl({
|
||||
...initialQueriesMap.traces,
|
||||
builder: {
|
||||
...initialQueriesMap.traces.builder,
|
||||
queryData: [
|
||||
{
|
||||
...initialQueriesMap.traces.builder.queryData[0],
|
||||
dataSource: DataSource.TRACES,
|
||||
aggregateOperator: 'noop',
|
||||
aggregateAttribute: {
|
||||
...initialQueriesMap.traces.builder.queryData[0].aggregateAttribute,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
|
||||
const compositeData = useGetCompositeQueryParam();
|
||||
|
||||
const handleChangeTagFilters = useCallback(
|
||||
(value: IBuilderQuery['filters']) => {
|
||||
handleChangeQueryData('filters', value);
|
||||
},
|
||||
[handleChangeQueryData],
|
||||
);
|
||||
|
||||
const fetchApiOverview = async (): Promise<
|
||||
SuccessResponse<any> | ErrorResponse
|
||||
> => {
|
||||
@@ -49,7 +85,21 @@ function DomainList({
|
||||
show_ip: showIP,
|
||||
filters: {
|
||||
op: 'AND',
|
||||
items: query?.filters.items,
|
||||
items: [
|
||||
{
|
||||
id: '212678b9',
|
||||
key: {
|
||||
key: 'kind_string',
|
||||
dataType: 'string',
|
||||
type: '',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
},
|
||||
op: '=',
|
||||
value: 'Client',
|
||||
},
|
||||
...(compositeData?.builder?.queryData[0]?.filters.items || []),
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
@@ -70,7 +120,7 @@ function DomainList({
|
||||
};
|
||||
|
||||
const { data, isLoading, isFetching } = useQuery(
|
||||
[REACT_QUERY_KEY.GET_DOMAINS_LIST, minTime, maxTime, query, showIP],
|
||||
[REACT_QUERY_KEY.GET_DOMAINS_LIST, minTime, maxTime, compositeData, showIP],
|
||||
fetchApiOverview,
|
||||
);
|
||||
|
||||
@@ -81,20 +131,18 @@ function DomainList({
|
||||
|
||||
return (
|
||||
<section className={cx('api-module-right-section')}>
|
||||
<Toolbar
|
||||
showAutoRefresh={false}
|
||||
rightActions={<RightToolbarActions onStageRunQuery={handleRunQuery} />}
|
||||
/>
|
||||
{/* add bottom border here */}
|
||||
<div className={cx('api-monitoring-list-header')}>
|
||||
<QueryBuilderSearchV2
|
||||
query={query}
|
||||
onChange={(searchFilters): void =>
|
||||
handleChangeQueryData('filters', searchFilters)
|
||||
}
|
||||
onChange={handleChangeTagFilters}
|
||||
placeholder="Search filters..."
|
||||
hardcodedAttributeKeys={hardcodedAttributeKeys}
|
||||
/>
|
||||
<DateTimeSelectionV2
|
||||
showAutoRefresh={false}
|
||||
showRefreshText={false}
|
||||
hideShareModal
|
||||
/>
|
||||
</div>
|
||||
<Table
|
||||
className={cx('api-monitoring-domain-list-table')}
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
.api-quick-filters-header {
|
||||
padding: 12px;
|
||||
border-bottom: 1px solid var(--bg-slate-400);
|
||||
border-right: 1px solid var(--bg-slate-400);
|
||||
|
||||
display: flex;
|
||||
align-items: center;
|
||||
@@ -24,6 +25,10 @@
|
||||
flex-direction: column;
|
||||
width: 100%;
|
||||
|
||||
.toolbar {
|
||||
border-bottom: 1px solid var(--bg-slate-400);
|
||||
}
|
||||
|
||||
.api-monitoring-list-header {
|
||||
width: 100%;
|
||||
padding: 8px;
|
||||
|
||||
@@ -7,12 +7,8 @@ import logEvent from 'api/common/logEvent';
|
||||
import cx from 'classnames';
|
||||
import QuickFilters from 'components/QuickFilters/QuickFilters';
|
||||
import { QuickFiltersSource } from 'components/QuickFilters/types';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
|
||||
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
|
||||
import { useEffect, useMemo, useState } from 'react';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { useEffect, useState } from 'react';
|
||||
|
||||
import { ApiMonitoringQuickFiltersConfig } from '../utils';
|
||||
import DomainList from './Domains/DomainList';
|
||||
@@ -20,39 +16,10 @@ import DomainList from './Domains/DomainList';
|
||||
function Explorer(): JSX.Element {
|
||||
const [showIP, setShowIP] = useState<boolean>(true);
|
||||
|
||||
const { currentQuery } = useQueryBuilder();
|
||||
|
||||
useEffect(() => {
|
||||
logEvent('API Monitoring: Landing page visited', {});
|
||||
}, []);
|
||||
|
||||
const { handleChangeQueryData } = useQueryOperations({
|
||||
index: 0,
|
||||
query: currentQuery.builder.queryData[0],
|
||||
entityVersion: '',
|
||||
});
|
||||
|
||||
const updatedCurrentQuery = useMemo(
|
||||
() => ({
|
||||
...currentQuery,
|
||||
builder: {
|
||||
...currentQuery.builder,
|
||||
queryData: [
|
||||
{
|
||||
...currentQuery.builder.queryData[0],
|
||||
dataSource: DataSource.TRACES,
|
||||
aggregateOperator: 'noop',
|
||||
aggregateAttribute: {
|
||||
...currentQuery.builder.queryData[0].aggregateAttribute,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
[currentQuery],
|
||||
);
|
||||
const query = updatedCurrentQuery?.builder?.queryData[0] || null;
|
||||
|
||||
return (
|
||||
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
|
||||
<div className={cx('api-monitoring-page', 'filter-visible')}>
|
||||
@@ -83,16 +50,9 @@ function Explorer(): JSX.Element {
|
||||
source={QuickFiltersSource.API_MONITORING}
|
||||
config={ApiMonitoringQuickFiltersConfig}
|
||||
handleFilterVisibilityChange={(): void => {}}
|
||||
onFilterChange={(query: Query): void =>
|
||||
handleChangeQueryData('filters', query.builder.queryData[0].filters)
|
||||
}
|
||||
/>
|
||||
</section>
|
||||
<DomainList
|
||||
query={query}
|
||||
showIP={showIP}
|
||||
handleChangeQueryData={handleChangeQueryData}
|
||||
/>
|
||||
<DomainList showIP={showIP} />
|
||||
</div>
|
||||
</Sentry.ErrorBoundary>
|
||||
);
|
||||
|
||||
@@ -0,0 +1,190 @@
import { cleanup, fireEvent, render, screen } from '@testing-library/react';
import {
getAllEndpointsWidgetData,
getGroupByFiltersFromGroupByValues,
} from 'container/ApiMonitoring/utils';
import { useGetAggregateKeys } from 'hooks/queryBuilder/useGetAggregateKeys';

import AllEndPoints from '../Explorer/Domains/DomainDetails/AllEndPoints';
import {
SPAN_ATTRIBUTES,
VIEWS,
} from '../Explorer/Domains/DomainDetails/constants';

// Mock the dependencies
jest.mock('container/ApiMonitoring/utils', () => ({
getAllEndpointsWidgetData: jest.fn(),
getGroupByFiltersFromGroupByValues: jest.fn(),
}));

jest.mock('container/GridCardLayout/GridCard', () => ({
__esModule: true,
default: jest.fn().mockImplementation(({ customOnRowClick }) => (
<div data-testid="grid-card-mock">
<button
type="button"
data-testid="row-click-button"
onClick={(): void =>
customOnRowClick({ [SPAN_ATTRIBUTES.URL_PATH]: '/api/test' })
}
>
Click Row
</button>
</div>
)),
}));

jest.mock(
'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2',
() => ({
__esModule: true,
default: jest.fn().mockImplementation(({ onChange }) => (
<div data-testid="query-builder-mock">
<button
type="button"
data-testid="filter-change-button"
onClick={(): void =>
onChange({
items: [{ id: 'test', key: 'test', op: '=', value: 'test' }],
op: 'AND',
})
}
>
Change Filter
</button>
</div>
)),
}),
);

jest.mock('hooks/queryBuilder/useGetAggregateKeys', () => ({
useGetAggregateKeys: jest.fn(),
}));

jest.mock('antd', () => {
const originalModule = jest.requireActual('antd');
return {
...originalModule,
Select: jest.fn().mockImplementation(({ onChange }) => (
<div data-testid="select-mock">
<button
data-testid="select-change-button"
type="button"
onClick={(): void => onChange(['http.status_code'])}
>
Change GroupBy
</button>
</div>
)),
};
});

describe('AllEndPoints', () => {
const mockProps = {
domainName: 'test-domain',
setSelectedEndPointName: jest.fn(),
setSelectedView: jest.fn(),
groupBy: [],
setGroupBy: jest.fn(),
timeRange: {
startTime: 1609459200000,
endTime: 1609545600000,
},
initialFilters: { op: 'AND', items: [] },
setInitialFiltersEndPointStats: jest.fn(),
};

beforeEach(() => {
jest.clearAllMocks();

// Setup mock implementations
(useGetAggregateKeys as jest.Mock).mockReturnValue({
data: {
payload: {
attributeKeys: [
{
key: 'http.status_code',
dataType: 'string',
isColumn: true,
isJSON: false,
type: '',
},
],
},
},
isLoading: false,
});

(getAllEndpointsWidgetData as jest.Mock).mockReturnValue({
id: 'test-widget',
title: 'Endpoint Overview',
description: 'Endpoint Overview',
panelTypes: 'table',
queryData: [],
});

(getGroupByFiltersFromGroupByValues as jest.Mock).mockReturnValue({
items: [{ id: 'group-filter', key: 'status', op: '=', value: '200' }],
op: 'AND',
});
});

// Add cleanup after each test
afterEach(() => {
cleanup();
});

it('renders component correctly', () => {
// eslint-disable-next-line react/jsx-props-no-spreading
render(<AllEndPoints {...mockProps} />);

// Verify basic component rendering
expect(screen.getByText('Group by')).toBeInTheDocument();
expect(screen.getByTestId('query-builder-mock')).toBeInTheDocument();
expect(screen.getByTestId('select-mock')).toBeInTheDocument();
expect(screen.getByTestId('grid-card-mock')).toBeInTheDocument();
});

it('handles filter changes', () => {
// eslint-disable-next-line react/jsx-props-no-spreading
render(<AllEndPoints {...mockProps} />);

// Trigger filter change
fireEvent.click(screen.getByTestId('filter-change-button'));

// Check if getAllEndpointsWidgetData was called with updated filters
expect(getAllEndpointsWidgetData).toHaveBeenCalledWith(
expect.anything(),
'test-domain',
expect.objectContaining({
items: expect.arrayContaining([expect.objectContaining({ id: 'test' })]),
op: 'AND',
}),
);
});

it('handles group by changes', () => {
// eslint-disable-next-line react/jsx-props-no-spreading
render(<AllEndPoints {...mockProps} />);

// Trigger group by change
fireEvent.click(screen.getByTestId('select-change-button'));

// Check if setGroupBy was called with updated group by value
expect(mockProps.setGroupBy).toHaveBeenCalled();
});

it('handles row click in grid card', async () => {
// eslint-disable-next-line react/jsx-props-no-spreading
render(<AllEndPoints {...mockProps} />);

// Trigger row click
fireEvent.click(screen.getByTestId('row-click-button'));

// Check if proper functions were called
expect(mockProps.setSelectedEndPointName).toHaveBeenCalledWith('/api/test');
expect(mockProps.setSelectedView).toHaveBeenCalledWith(VIEWS.ENDPOINT_STATS);
expect(mockProps.setInitialFiltersEndPointStats).toHaveBeenCalled();
expect(getGroupByFiltersFromGroupByValues).toHaveBeenCalled();
});
});
@@ -0,0 +1,366 @@
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import { getFormattedDependentServicesData } from 'container/ApiMonitoring/utils';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
|
||||
import DependentServices from '../Explorer/Domains/DomainDetails/components/DependentServices';
|
||||
import ErrorState from '../Explorer/Domains/DomainDetails/components/ErrorState';
|
||||
|
||||
// Create a partial mock of the UseQueryResult interface for testing
|
||||
interface MockQueryResult {
|
||||
isLoading: boolean;
|
||||
isRefetching: boolean;
|
||||
isError: boolean;
|
||||
data?: any;
|
||||
refetch: () => void;
|
||||
}
|
||||
|
||||
// Mock the utility function
|
||||
jest.mock('container/ApiMonitoring/utils', () => ({
|
||||
getFormattedDependentServicesData: jest.fn(),
|
||||
dependentServicesColumns: [
|
||||
{ title: 'Dependent Services', dataIndex: 'serviceData', key: 'serviceData' },
|
||||
{ title: 'AVG. LATENCY', dataIndex: 'latency', key: 'latency' },
|
||||
{ title: 'ERROR %', dataIndex: 'errorPercentage', key: 'errorPercentage' },
|
||||
{ title: 'AVG. RATE', dataIndex: 'rate', key: 'rate' },
|
||||
],
|
||||
}));
|
||||
|
||||
// Mock the ErrorState component
|
||||
jest.mock('../Explorer/Domains/DomainDetails/components/ErrorState', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockImplementation(({ refetch }) => (
|
||||
<div data-testid="error-state-mock">
|
||||
<button type="button" data-testid="refetch-button" onClick={refetch}>
|
||||
Retry
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
}));
|
||||
|
||||
// Mock antd components
|
||||
jest.mock('antd', () => {
|
||||
const originalModule = jest.requireActual('antd');
|
||||
return {
|
||||
...originalModule,
|
||||
Table: jest
|
||||
.fn()
|
||||
.mockImplementation(({ dataSource, loading, pagination, onRow }) => (
|
||||
<div data-testid="table-mock">
|
||||
<div data-testid="loading-state">
|
||||
{loading ? 'Loading' : 'Not Loading'}
|
||||
</div>
|
||||
<div data-testid="row-count">{dataSource?.length || 0}</div>
|
||||
<div data-testid="page-size">{pagination?.pageSize}</div>
|
||||
{dataSource?.map((item: any, index: number) => (
|
||||
<div
|
||||
key={`service-${item.key || index}`}
|
||||
data-testid={`table-row-${index}`}
|
||||
onClick={(): void => onRow?.(item)?.onClick?.()}
|
||||
onKeyDown={(e: React.KeyboardEvent<HTMLDivElement>): void => {
|
||||
if (e.key === 'Enter' || e.key === ' ') {
|
||||
onRow?.(item)?.onClick?.();
|
||||
}
|
||||
}}
|
||||
role="button"
|
||||
tabIndex={0}
|
||||
>
|
||||
{item.serviceData.serviceName}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)),
|
||||
Skeleton: jest
|
||||
.fn()
|
||||
.mockImplementation(() => <div data-testid="skeleton-mock" />),
|
||||
Typography: {
|
||||
Text: jest
|
||||
.fn()
|
||||
.mockImplementation(({ children }) => (
|
||||
<div data-testid="typography-text">{children}</div>
|
||||
)),
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
describe('DependentServices', () => {
|
||||
// Sample mock data to use in tests
|
||||
const mockDependentServicesData = [
|
||||
{
|
||||
key: 'service1',
|
||||
serviceData: {
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
serviceName: 'auth-service',
|
||||
count: 500,
|
||||
percentage: 62.5,
|
||||
},
|
||||
latency: 120,
|
||||
rate: '15',
|
||||
errorPercentage: '2.5',
|
||||
},
|
||||
{
|
||||
key: 'service2',
|
||||
serviceData: {
|
||||
serviceName: 'db-service',
|
||||
count: 300,
|
||||
percentage: 37.5,
|
||||
},
|
||||
latency: 80,
|
||||
rate: '10',
|
||||
errorPercentage: '1.2',
|
||||
},
|
||||
];
|
||||
|
||||
// Default props for tests
|
||||
const mockTimeRange = {
|
||||
startTime: 1609459200000,
|
||||
endTime: 1609545600000,
|
||||
};
|
||||
|
||||
const refetchFn = jest.fn();
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
(getFormattedDependentServicesData as jest.Mock).mockReturnValue(
|
||||
mockDependentServicesData,
|
||||
);
|
||||
});
|
||||
|
||||
it('renders loading state correctly', () => {
|
||||
// Arrange
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: true,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
const { container } = render(
|
||||
<DependentServices
|
||||
dependentServicesQuery={mockQuery as any}
|
||||
timeRange={mockTimeRange}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(container.querySelector('.ant-skeleton')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders error state correctly', () => {
|
||||
// Arrange
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: true,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(
|
||||
<DependentServices
|
||||
dependentServicesQuery={mockQuery as any}
|
||||
timeRange={mockTimeRange}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('error-state-mock')).toBeInTheDocument();
|
||||
expect(ErrorState).toHaveBeenCalledWith(
|
||||
{ refetch: expect.any(Function) },
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('renders data correctly when loaded', () => {
|
||||
// Arrange
|
||||
const mockData = {
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [
|
||||
{
|
||||
data: {
|
||||
'service.name': 'auth-service',
|
||||
A: '500',
|
||||
B: '120000000',
|
||||
C: '15',
|
||||
F1: '2.5',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
} as SuccessResponse<any>;
|
||||
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(
|
||||
<DependentServices
|
||||
dependentServicesQuery={mockQuery as any}
|
||||
timeRange={mockTimeRange}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(getFormattedDependentServicesData).toHaveBeenCalledWith(
|
||||
mockData.payload.data.result[0].table.rows,
|
||||
);
|
||||
|
||||
// Check the table was rendered with the correct data
|
||||
expect(screen.getByTestId('table-mock')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('loading-state')).toHaveTextContent('Not Loading');
|
||||
expect(screen.getByTestId('row-count')).toHaveTextContent('2');
|
||||
|
||||
// Default (collapsed) pagination should be 5
|
||||
expect(screen.getByTestId('page-size')).toHaveTextContent('5');
|
||||
});
|
||||
|
||||
it('handles refetching state correctly', () => {
|
||||
// Arrange
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: true,
|
||||
isError: false,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
const { container } = render(
|
||||
<DependentServices
|
||||
dependentServicesQuery={mockQuery as any}
|
||||
timeRange={mockTimeRange}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(container.querySelector('.ant-skeleton')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('handles row click correctly', () => {
|
||||
// Mock window.open
|
||||
const originalOpen = window.open;
|
||||
window.open = jest.fn();
|
||||
|
||||
// Arrange
|
||||
const mockData = {
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [
|
||||
{
|
||||
data: {
|
||||
'service.name': 'auth-service',
|
||||
A: '500',
|
||||
B: '120000000',
|
||||
C: '15',
|
||||
F1: '2.5',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
} as SuccessResponse<any>;
|
||||
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(
|
||||
<DependentServices
|
||||
dependentServicesQuery={mockQuery as any}
|
||||
timeRange={mockTimeRange}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Click on the first row
|
||||
fireEvent.click(screen.getByTestId('table-row-0'));
|
||||
|
||||
// Assert
|
||||
expect(window.open).toHaveBeenCalledWith(
|
||||
expect.stringContaining('/services/auth-service'),
|
||||
'_blank',
|
||||
);
|
||||
|
||||
// Restore original window.open
|
||||
window.open = originalOpen;
|
||||
});
|
||||
|
||||
it('expands table when showing more', () => {
|
||||
// Set up more than 5 items so the "show more" button appears
|
||||
const moreItems = Array(8)
|
||||
.fill(0)
|
||||
.map((_, index) => ({
|
||||
key: `service${index}`,
|
||||
serviceData: {
|
||||
serviceName: `service-${index}`,
|
||||
count: 100,
|
||||
percentage: 12.5,
|
||||
},
|
||||
latency: 100,
|
||||
rate: '10',
|
||||
errorPercentage: '1',
|
||||
}));
|
||||
|
||||
(getFormattedDependentServicesData as jest.Mock).mockReturnValue(moreItems);
|
||||
|
||||
const mockData = {
|
||||
payload: { data: { result: [{ table: { rows: [] } }] } },
|
||||
} as SuccessResponse<any>;
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Render the component
|
||||
render(
|
||||
<DependentServices
|
||||
dependentServicesQuery={mockQuery as any}
|
||||
timeRange={mockTimeRange}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Find the "Show more" button (using container query since it might not have a testId)
|
||||
const showMoreButton = screen.getByText(/Show more/i);
|
||||
expect(showMoreButton).toBeInTheDocument();
|
||||
|
||||
// Initial page size should be 5
|
||||
expect(screen.getByTestId('page-size')).toHaveTextContent('5');
|
||||
|
||||
// Click the button to expand
|
||||
fireEvent.click(showMoreButton);
|
||||
|
||||
// Page size should now be the full data length
|
||||
expect(screen.getByTestId('page-size')).toHaveTextContent('8');
|
||||
|
||||
// Text should have changed to "Show less"
|
||||
expect(screen.getByText(/Show less/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,386 @@
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import {
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY,
|
||||
extractPortAndEndpoint,
|
||||
getEndPointDetailsQueryPayload,
|
||||
getLatencyOverTimeWidgetData,
|
||||
getRateOverTimeWidgetData,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import {
|
||||
CustomTimeType,
|
||||
Time,
|
||||
} from 'container/TopNav/DateTimeSelectionV2/config';
|
||||
import { useQueries } from 'react-query';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import {
|
||||
TagFilter,
|
||||
TagFilterItem,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
|
||||
import EndPointDetails from '../Explorer/Domains/DomainDetails/EndPointDetails';
|
||||
|
||||
// Mock dependencies
|
||||
jest.mock('react-query', () => ({
|
||||
useQueries: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('container/ApiMonitoring/utils', () => ({
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY: [
|
||||
'endPointMetricsData',
|
||||
'endPointStatusCodeData',
|
||||
'endPointDropDownData',
|
||||
'endPointDependentServicesData',
|
||||
'endPointStatusCodeBarChartsData',
|
||||
'endPointStatusCodeLatencyBarChartsData',
|
||||
],
|
||||
extractPortAndEndpoint: jest.fn(),
|
||||
getEndPointDetailsQueryPayload: jest.fn(),
|
||||
getLatencyOverTimeWidgetData: jest.fn(),
|
||||
getRateOverTimeWidgetData: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock(
|
||||
'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2',
|
||||
() => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockImplementation(({ onChange }) => (
|
||||
<div data-testid="query-builder-search">
|
||||
<button
|
||||
type="button"
|
||||
data-testid="filter-change-button"
|
||||
onClick={(): void =>
|
||||
onChange({
|
||||
items: [
|
||||
{
|
||||
id: 'test-filter',
|
||||
key: {
|
||||
key: 'test.key',
|
||||
dataType: DataTypes.String,
|
||||
type: 'tag',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
},
|
||||
op: '=',
|
||||
value: 'test-value',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
})
|
||||
}
|
||||
>
|
||||
Change Filter
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
}),
|
||||
);
|
||||
|
||||
// Mock all child components to simplify testing
|
||||
jest.mock(
|
||||
'../Explorer/Domains/DomainDetails/components/EndPointMetrics',
|
||||
() => ({
|
||||
__esModule: true,
|
||||
default: jest
|
||||
.fn()
|
||||
.mockImplementation(() => (
|
||||
<div data-testid="endpoint-metrics">EndPoint Metrics</div>
|
||||
)),
|
||||
}),
|
||||
);
|
||||
|
||||
jest.mock(
|
||||
'../Explorer/Domains/DomainDetails/components/EndPointsDropDown',
|
||||
() => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockImplementation(({ setSelectedEndPointName }) => (
|
||||
<div data-testid="endpoints-dropdown">
|
||||
<button
|
||||
type="button"
|
||||
data-testid="select-endpoint-button"
|
||||
onClick={(): void => setSelectedEndPointName('/api/new-endpoint')}
|
||||
>
|
||||
Select Endpoint
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
}),
|
||||
);
|
||||
|
||||
jest.mock(
|
||||
'../Explorer/Domains/DomainDetails/components/DependentServices',
|
||||
() => ({
|
||||
__esModule: true,
|
||||
default: jest
|
||||
.fn()
|
||||
.mockImplementation(() => (
|
||||
<div data-testid="dependent-services">Dependent Services</div>
|
||||
)),
|
||||
}),
|
||||
);
|
||||
|
||||
jest.mock(
|
||||
'../Explorer/Domains/DomainDetails/components/StatusCodeBarCharts',
|
||||
() => ({
|
||||
__esModule: true,
|
||||
default: jest
|
||||
.fn()
|
||||
.mockImplementation(() => (
|
||||
<div data-testid="status-code-bar-charts">Status Code Bar Charts</div>
|
||||
)),
|
||||
}),
|
||||
);
|
||||
|
||||
jest.mock(
|
||||
'../Explorer/Domains/DomainDetails/components/StatusCodeTable',
|
||||
() => ({
|
||||
__esModule: true,
|
||||
default: jest
|
||||
.fn()
|
||||
.mockImplementation(() => (
|
||||
<div data-testid="status-code-table">Status Code Table</div>
|
||||
)),
|
||||
}),
|
||||
);
|
||||
|
||||
jest.mock(
|
||||
'../Explorer/Domains/DomainDetails/components/MetricOverTimeGraph',
|
||||
() => ({
|
||||
__esModule: true,
|
||||
default: jest
|
||||
.fn()
|
||||
.mockImplementation(({ widget }) => (
|
||||
<div data-testid={`metric-graph-${widget.title}`}>{widget.title} Graph</div>
|
||||
)),
|
||||
}),
|
||||
);
|
||||
|
||||
describe('EndPointDetails Component', () => {
|
||||
const mockQueryResults = Array(6).fill({
|
||||
data: { data: [] },
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
error: null,
|
||||
});
|
||||
|
||||
const mockProps = {
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
domainName: 'test-domain',
|
||||
endPointName: '/api/test',
|
||||
setSelectedEndPointName: jest.fn(),
|
||||
initialFilters: { items: [], op: 'AND' } as TagFilter,
|
||||
timeRange: {
|
||||
startTime: 1609459200000,
|
||||
endTime: 1609545600000,
|
||||
},
|
||||
handleTimeChange: jest.fn() as (
|
||||
interval: Time | CustomTimeType,
|
||||
dateTimeRange?: [number, number],
|
||||
) => void,
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
|
||||
(extractPortAndEndpoint as jest.Mock).mockReturnValue({
|
||||
port: '8080',
|
||||
endpoint: '/api/test',
|
||||
});
|
||||
|
||||
(getEndPointDetailsQueryPayload as jest.Mock).mockReturnValue([
|
||||
{ id: 'query1', label: 'Query 1' },
|
||||
{ id: 'query2', label: 'Query 2' },
|
||||
{ id: 'query3', label: 'Query 3' },
|
||||
{ id: 'query4', label: 'Query 4' },
|
||||
{ id: 'query5', label: 'Query 5' },
|
||||
{ id: 'query6', label: 'Query 6' },
|
||||
]);
|
||||
|
||||
(getRateOverTimeWidgetData as jest.Mock).mockReturnValue({
|
||||
title: 'Rate Over Time',
|
||||
id: 'rate-widget',
|
||||
});
|
||||
|
||||
(getLatencyOverTimeWidgetData as jest.Mock).mockReturnValue({
|
||||
title: 'Latency Over Time',
|
||||
id: 'latency-widget',
|
||||
});
|
||||
|
||||
(useQueries as jest.Mock).mockReturnValue(mockQueryResults);
|
||||
});
|
||||
|
||||
it('renders the component correctly', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<EndPointDetails {...mockProps} />);
|
||||
|
||||
// Check all major components are rendered
|
||||
expect(screen.getByTestId('query-builder-search')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('endpoints-dropdown')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('endpoint-metrics')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('dependent-services')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('status-code-bar-charts')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('status-code-table')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('metric-graph-Rate Over Time')).toBeInTheDocument();
|
||||
expect(
|
||||
screen.getByTestId('metric-graph-Latency Over Time'),
|
||||
).toBeInTheDocument();
|
||||
|
||||
// Check endpoint metadata is displayed
|
||||
expect(screen.getByText(/8080/i)).toBeInTheDocument();
|
||||
expect(screen.getByText('/api/test')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('calls getEndPointDetailsQueryPayload with correct parameters', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<EndPointDetails {...mockProps} />);
|
||||
|
||||
expect(getEndPointDetailsQueryPayload).toHaveBeenCalledWith(
|
||||
'test-domain',
|
||||
mockProps.timeRange.startTime,
|
||||
mockProps.timeRange.endTime,
|
||||
expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
|
||||
value: '/api/test',
|
||||
}),
|
||||
]),
|
||||
op: 'AND',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('adds endpoint filter to initial filters', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<EndPointDetails {...mockProps} />);
|
||||
|
||||
expect(getEndPointDetailsQueryPayload).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.anything(),
|
||||
expect.anything(),
|
||||
expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
|
||||
value: '/api/test',
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('updates filters when QueryBuilderSearch changes', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<EndPointDetails {...mockProps} />);
|
||||
|
||||
// Trigger filter change
|
||||
fireEvent.click(screen.getByTestId('filter-change-button'));
|
||||
|
||||
// Check that filters were updated in subsequent calls to utility functions
|
||||
expect(getEndPointDetailsQueryPayload).toHaveBeenCalledTimes(2);
|
||||
expect(getEndPointDetailsQueryPayload).toHaveBeenLastCalledWith(
|
||||
expect.anything(),
|
||||
expect.anything(),
|
||||
expect.anything(),
|
||||
expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: expect.objectContaining({ key: 'test.key' }),
|
||||
value: 'test-value',
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('handles endpoint dropdown selection', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<EndPointDetails {...mockProps} />);
|
||||
|
||||
// Trigger endpoint selection
|
||||
fireEvent.click(screen.getByTestId('select-endpoint-button'));
|
||||
|
||||
// Check if endpoint was updated
|
||||
expect(mockProps.setSelectedEndPointName).toHaveBeenCalledWith(
|
||||
'/api/new-endpoint',
|
||||
);
|
||||
});
|
||||
|
||||
it('does not display dependent services when service filter is applied', () => {
|
||||
const propsWithServiceFilter = {
|
||||
...mockProps,
|
||||
initialFilters: {
|
||||
items: [
|
||||
{
|
||||
id: 'service-filter',
|
||||
key: {
|
||||
key: 'service.name',
|
||||
dataType: DataTypes.String,
|
||||
type: 'tag',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
},
|
||||
op: '=',
|
||||
value: 'test-service',
|
||||
},
|
||||
] as TagFilterItem[],
|
||||
op: 'AND',
|
||||
} as TagFilter,
|
||||
};
|
||||
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<EndPointDetails {...propsWithServiceFilter} />);
|
||||
|
||||
// Dependent services should not be displayed
|
||||
expect(screen.queryByTestId('dependent-services')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('passes the correct parameters to widget data generators', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<EndPointDetails {...mockProps} />);
|
||||
|
||||
expect(getRateOverTimeWidgetData).toHaveBeenCalledWith(
|
||||
'test-domain',
|
||||
'/api/test',
|
||||
expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
|
||||
value: '/api/test',
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
);
|
||||
|
||||
expect(getLatencyOverTimeWidgetData).toHaveBeenCalledWith(
|
||||
'test-domain',
|
||||
'/api/test',
|
||||
expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
|
||||
value: '/api/test',
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('generates correct query parameters for useQueries', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<EndPointDetails {...mockProps} />);
|
||||
|
||||
// Check if useQueries was called with correct parameters
|
||||
expect(useQueries).toHaveBeenCalledWith(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
queryKey: expect.arrayContaining([END_POINT_DETAILS_QUERY_KEYS_ARRAY[0]]),
|
||||
}),
|
||||
expect.objectContaining({
|
||||
queryKey: expect.arrayContaining([END_POINT_DETAILS_QUERY_KEYS_ARRAY[1]]),
|
||||
}),
|
||||
// ... and so on for other queries
|
||||
]),
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,211 @@
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import { getFormattedEndPointMetricsData } from 'container/ApiMonitoring/utils';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
|
||||
import EndPointMetrics from '../Explorer/Domains/DomainDetails/components/EndPointMetrics';
|
||||
import ErrorState from '../Explorer/Domains/DomainDetails/components/ErrorState';
|
||||
|
||||
// Create a partial mock of the UseQueryResult interface for testing
|
||||
interface MockQueryResult {
|
||||
isLoading: boolean;
|
||||
isRefetching: boolean;
|
||||
isError: boolean;
|
||||
data?: any;
|
||||
refetch: () => void;
|
||||
}
|
||||
|
||||
// Mock the utils function
|
||||
jest.mock('container/ApiMonitoring/utils', () => ({
|
||||
getFormattedEndPointMetricsData: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock the ErrorState component
|
||||
jest.mock('../Explorer/Domains/DomainDetails/components/ErrorState', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockImplementation(({ refetch }) => (
|
||||
<div data-testid="error-state-mock">
|
||||
<button type="button" data-testid="refetch-button" onClick={refetch}>
|
||||
Retry
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
}));
|
||||
|
||||
// Mock antd components
|
||||
jest.mock('antd', () => {
|
||||
const originalModule = jest.requireActual('antd');
|
||||
return {
|
||||
...originalModule,
|
||||
Progress: jest
|
||||
.fn()
|
||||
.mockImplementation(() => <div data-testid="progress-bar-mock" />),
|
||||
Skeleton: {
|
||||
Button: jest
|
||||
.fn()
|
||||
.mockImplementation(() => <div data-testid="skeleton-button-mock" />),
|
||||
},
|
||||
Tooltip: jest
|
||||
.fn()
|
||||
.mockImplementation(({ children }) => (
|
||||
<div data-testid="tooltip-mock">{children}</div>
|
||||
)),
|
||||
Typography: {
|
||||
Text: jest.fn().mockImplementation(({ children, className }) => (
|
||||
<div data-testid={`typography-${className}`} className={className}>
|
||||
{children}
|
||||
</div>
|
||||
)),
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
describe('EndPointMetrics', () => {
|
||||
// Common metric data to use in tests
|
||||
const mockMetricsData = {
|
||||
key: 'test-key',
|
||||
rate: '42',
|
||||
latency: 99,
|
||||
errorRate: 5.5,
|
||||
lastUsed: '5 minutes ago',
|
||||
};
|
||||
|
||||
// Basic props for tests
|
||||
const refetchFn = jest.fn();
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
(getFormattedEndPointMetricsData as jest.Mock).mockReturnValue(
|
||||
mockMetricsData,
|
||||
);
|
||||
});
|
||||
|
||||
it('renders loading state correctly', () => {
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: true,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);
|
||||
|
||||
// Verify skeleton loaders are visible
|
||||
const skeletonElements = screen.getAllByTestId('skeleton-button-mock');
|
||||
expect(skeletonElements.length).toBe(4);
|
||||
|
||||
// Verify labels are visible even during loading
|
||||
expect(screen.getByText('Rate')).toBeInTheDocument();
|
||||
expect(screen.getByText('AVERAGE LATENCY')).toBeInTheDocument();
|
||||
expect(screen.getByText('ERROR %')).toBeInTheDocument();
|
||||
expect(screen.getByText('LAST USED')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders error state correctly', () => {
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: true,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);
|
||||
|
||||
// Verify error state is shown
|
||||
expect(screen.getByTestId('error-state-mock')).toBeInTheDocument();
|
||||
expect(ErrorState).toHaveBeenCalledWith(
|
||||
{ refetch: expect.any(Function) },
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('renders data correctly when loaded', () => {
|
||||
const mockData = {
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [
|
||||
{ data: { A: '42', B: '99000000', D: '1609459200000000', F1: '5.5' } },
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
} as SuccessResponse<any>;
|
||||
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);
|
||||
|
||||
// Verify the utils function was called with the data
|
||||
expect(getFormattedEndPointMetricsData).toHaveBeenCalledWith(
|
||||
mockData.payload.data.result[0].table.rows,
|
||||
);
|
||||
|
||||
// Verify data is displayed
|
||||
expect(
|
||||
screen.getByText(`${mockMetricsData.rate} ops/sec`),
|
||||
).toBeInTheDocument();
|
||||
expect(screen.getByText(`${mockMetricsData.latency}ms`)).toBeInTheDocument();
|
||||
expect(screen.getByText(mockMetricsData.lastUsed)).toBeInTheDocument();
|
||||
expect(screen.getByTestId('progress-bar-mock')).toBeInTheDocument(); // For error rate
|
||||
});
|
||||
|
||||
it('handles refetching state correctly', () => {
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: true,
|
||||
isError: false,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);
|
||||
|
||||
// Verify skeleton loaders are visible during refetching
|
||||
const skeletonElements = screen.getAllByTestId('skeleton-button-mock');
|
||||
expect(skeletonElements.length).toBe(4);
|
||||
});
|
||||
|
||||
it('handles null metrics data gracefully', () => {
|
||||
// Mock the utils function to return null to simulate missing data
|
||||
(getFormattedEndPointMetricsData as jest.Mock).mockReturnValue(null);
|
||||
|
||||
const mockData = {
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
} as SuccessResponse<any>;
|
||||
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);
|
||||
|
||||
// Even with null data, the component should render without crashing
|
||||
expect(screen.getByText('Rate')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,221 @@
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import { getFormattedEndPointDropDownData } from 'container/ApiMonitoring/utils';
|
||||
|
||||
import EndPointsDropDown from '../Explorer/Domains/DomainDetails/components/EndPointsDropDown';
|
||||
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
|
||||
|
||||
// Mock the Select component from antd
|
||||
jest.mock('antd', () => {
|
||||
const originalModule = jest.requireActual('antd');
|
||||
return {
|
||||
...originalModule,
|
||||
Select: jest
|
||||
.fn()
|
||||
.mockImplementation(({ value, loading, onChange, options, onClear }) => (
|
||||
<div data-testid="mock-select">
|
||||
<div data-testid="select-value">{value}</div>
|
||||
<div data-testid="select-loading">
|
||||
{loading ? 'loading' : 'not-loading'}
|
||||
</div>
|
||||
<select
|
||||
data-testid="select-element"
|
||||
value={value || ''}
|
||||
onChange={(e): void => onChange(e.target.value)}
|
||||
>
|
||||
<option value="">Select...</option>
|
||||
{options?.map((option: { value: string; label: string; key: string }) => (
|
||||
<option key={option.value} value={option.value}>
|
||||
{option.label}
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
<button data-testid="select-clear-button" type="button" onClick={onClear}>
|
||||
Clear
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
};
|
||||
});
|
||||
|
||||
// Mock the utilities
|
||||
jest.mock('container/ApiMonitoring/utils', () => ({
|
||||
getFormattedEndPointDropDownData: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('EndPointsDropDown Component', () => {
|
||||
const mockEndPoints = [
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
{ key: '1', value: '/api/endpoint1', label: '/api/endpoint1' },
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
{ key: '2', value: '/api/endpoint2', label: '/api/endpoint2' },
|
||||
];
|
||||
|
||||
const mockSetSelectedEndPointName = jest.fn();
|
||||
|
||||
// Create a mock that satisfies the UseQueryResult interface
|
||||
const createMockQueryResult = (overrides: any = {}): any => ({
|
||||
data: {
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
dataUpdatedAt: 0,
|
||||
error: null,
|
||||
errorUpdatedAt: 0,
|
||||
failureCount: 0,
|
||||
isError: false,
|
||||
isFetched: true,
|
||||
isFetchedAfterMount: true,
|
||||
isFetching: false,
|
||||
isIdle: false,
|
||||
isLoading: false,
|
||||
isLoadingError: false,
|
||||
isPlaceholderData: false,
|
||||
isPreviousData: false,
|
||||
isRefetchError: false,
|
||||
isRefetching: false,
|
||||
isStale: false,
|
||||
isSuccess: true,
|
||||
refetch: jest.fn(),
|
||||
remove: jest.fn(),
|
||||
status: 'success',
|
||||
...overrides,
|
||||
});
|
||||
|
||||
const defaultProps = {
|
||||
selectedEndPointName: '',
|
||||
setSelectedEndPointName: mockSetSelectedEndPointName,
|
||||
endPointDropDownDataQuery: createMockQueryResult(),
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
(getFormattedEndPointDropDownData as jest.Mock).mockReturnValue(
|
||||
mockEndPoints,
|
||||
);
|
||||
});
|
||||
|
||||
it('renders the component correctly', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<EndPointsDropDown {...defaultProps} />);
|
||||
|
||||
expect(screen.getByTestId('mock-select')).toBeInTheDocument();
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
expect(screen.getByTestId('select-loading')).toHaveTextContent('not-loading');
|
||||
});
|
||||
|
||||
it('shows loading state when data is loading', () => {
|
||||
const loadingProps = {
|
||||
...defaultProps,
|
||||
endPointDropDownDataQuery: createMockQueryResult({
|
||||
isLoading: true,
|
||||
}),
|
||||
};
|
||||
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<EndPointsDropDown {...loadingProps} />);
|
||||
|
||||
expect(screen.getByTestId('select-loading')).toHaveTextContent('loading');
|
||||
});
|
||||
|
||||
it('shows loading state when data is fetching', () => {
|
||||
const fetchingProps = {
|
||||
...defaultProps,
|
||||
endPointDropDownDataQuery: createMockQueryResult({
|
||||
isFetching: true,
|
||||
}),
|
||||
};
|
||||
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<EndPointsDropDown {...fetchingProps} />);
|
||||
|
||||
expect(screen.getByTestId('select-loading')).toHaveTextContent('loading');
|
||||
});
|
||||
|
||||
it('displays the selected endpoint', () => {
|
||||
const selectedProps = {
|
||||
...defaultProps,
|
||||
selectedEndPointName: '/api/endpoint1',
|
||||
};
|
||||
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<EndPointsDropDown {...selectedProps} />);
|
||||
|
||||
expect(screen.getByTestId('select-value')).toHaveTextContent(
|
||||
'/api/endpoint1',
|
||||
);
|
||||
});
|
||||
|
||||
it('calls setSelectedEndPointName when an option is selected', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<EndPointsDropDown {...defaultProps} />);
|
||||
|
||||
// Get the select element and change its value
|
||||
const selectElement = screen.getByTestId('select-element');
|
||||
fireEvent.change(selectElement, { target: { value: '/api/endpoint2' } });
|
||||
|
||||
expect(mockSetSelectedEndPointName).toHaveBeenCalledWith('/api/endpoint2');
|
||||
});
|
||||
|
||||
it('calls setSelectedEndPointName with empty string when cleared', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<EndPointsDropDown {...defaultProps} />);
|
||||
|
||||
// Click the clear button
|
||||
const clearButton = screen.getByTestId('select-clear-button');
|
||||
fireEvent.click(clearButton);
|
||||
|
||||
expect(mockSetSelectedEndPointName).toHaveBeenCalledWith('');
|
||||
});
|
||||
|
||||
it('passes dropdown style prop correctly', () => {
|
||||
const styleProps = {
|
||||
...defaultProps,
|
||||
dropdownStyle: { maxHeight: '200px' },
|
||||
};
|
||||
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<EndPointsDropDown {...styleProps} />);
|
||||
|
||||
// We can't easily test style props in our mock, but at least ensure the component rendered
|
||||
expect(screen.getByTestId('mock-select')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('formats data using the utility function', () => {
|
||||
const mockRows = [
|
||||
{ data: { [SPAN_ATTRIBUTES.URL_PATH]: '/api/test', A: 10 } },
|
||||
];
|
||||
|
||||
const dataProps = {
|
||||
...defaultProps,
|
||||
endPointDropDownDataQuery: createMockQueryResult({
|
||||
data: {
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: mockRows,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<EndPointsDropDown {...dataProps} />);
|
||||
|
||||
expect(getFormattedEndPointDropDownData).toHaveBeenCalledWith(mockRows);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,493 @@
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import {
|
||||
getCustomFiltersForBarChart,
|
||||
getFormattedEndPointStatusCodeChartData,
|
||||
getStatusCodeBarChartWidgetData,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import ErrorState from '../Explorer/Domains/DomainDetails/components/ErrorState';
|
||||
import StatusCodeBarCharts from '../Explorer/Domains/DomainDetails/components/StatusCodeBarCharts';
|
||||
|
||||
// Create a partial mock of the UseQueryResult interface for testing
|
||||
interface MockQueryResult {
|
||||
isLoading: boolean;
|
||||
isRefetching: boolean;
|
||||
isError: boolean;
|
||||
error?: Error;
|
||||
data?: any;
|
||||
refetch: () => void;
|
||||
}
|
||||
|
||||
// Mocks
|
||||
jest.mock('components/Uplot', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockImplementation(() => <div data-testid="uplot-mock" />),
|
||||
}));
|
||||
|
||||
jest.mock('components/CeleryTask/useGetGraphCustomSeries', () => ({
|
||||
useGetGraphCustomSeries: (): { getCustomSeries: jest.Mock } => ({
|
||||
getCustomSeries: jest.fn(),
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.mock('components/CeleryTask/useNavigateToExplorer', () => ({
|
||||
useNavigateToExplorer: (): { navigateToExplorer: jest.Mock } => ({
|
||||
navigateToExplorer: jest.fn(),
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.mock('container/GridCardLayout/useGraphClickToShowButton', () => ({
|
||||
useGraphClickToShowButton: (): {
|
||||
componentClick: boolean;
|
||||
htmlRef: HTMLElement | null;
|
||||
} => ({
|
||||
componentClick: false,
|
||||
htmlRef: null,
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.mock('container/GridCardLayout/useNavigateToExplorerPages', () => ({
|
||||
__esModule: true,
|
||||
default: (): { navigateToExplorerPages: jest.Mock } => ({
|
||||
navigateToExplorerPages: jest.fn(),
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.mock('hooks/useDarkMode', () => ({
|
||||
useIsDarkMode: (): boolean => false,
|
||||
}));
|
||||
|
||||
jest.mock('hooks/useDimensions', () => ({
|
||||
useResizeObserver: (): { width: number; height: number } => ({
|
||||
width: 800,
|
||||
height: 400,
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.mock('hooks/useNotifications', () => ({
|
||||
useNotifications: (): { notifications: [] } => ({ notifications: [] }),
|
||||
}));
|
||||
|
||||
jest.mock('lib/uPlotLib/getUplotChartOptions', () => ({
|
||||
getUPlotChartOptions: jest.fn().mockReturnValue({}),
|
||||
}));
|
||||
|
||||
jest.mock('lib/uPlotLib/utils/getUplotChartData', () => ({
|
||||
getUPlotChartData: jest.fn().mockReturnValue([]),
|
||||
}));
|
||||
|
||||
// Mock utility functions
|
||||
jest.mock('container/ApiMonitoring/utils', () => ({
|
||||
getFormattedEndPointStatusCodeChartData: jest.fn(),
|
||||
getStatusCodeBarChartWidgetData: jest.fn(),
|
||||
getCustomFiltersForBarChart: jest.fn(),
|
||||
statusCodeWidgetInfo: [
|
||||
{ title: 'Status Code Count', yAxisUnit: 'count' },
|
||||
{ title: 'Status Code Latency', yAxisUnit: 'ms' },
|
||||
],
|
||||
}));
|
||||
|
||||
// Mock the ErrorState component
|
||||
jest.mock('../Explorer/Domains/DomainDetails/components/ErrorState', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockImplementation(({ refetch }) => (
|
||||
<div data-testid="error-state-mock">
|
||||
<button type="button" data-testid="refetch-button" onClick={refetch}>
|
||||
Retry
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
}));
|
||||
|
||||
// Mock antd components
|
||||
jest.mock('antd', () => {
|
||||
const originalModule = jest.requireActual('antd');
|
||||
return {
|
||||
...originalModule,
|
||||
Card: jest.fn().mockImplementation(({ children, className }) => (
|
||||
<div data-testid="card-mock" className={className}>
|
||||
{children}
|
||||
</div>
|
||||
)),
|
||||
Typography: {
|
||||
Text: jest
|
||||
.fn()
|
||||
.mockImplementation(({ children }) => (
|
||||
<div data-testid="typography-text">{children}</div>
|
||||
)),
|
||||
},
|
||||
Button: {
|
||||
...originalModule.Button,
|
||||
Group: jest.fn().mockImplementation(({ children, className }) => (
|
||||
<div data-testid="button-group" className={className}>
|
||||
{children}
|
||||
</div>
|
||||
)),
|
||||
},
|
||||
Skeleton: jest
|
||||
.fn()
|
||||
.mockImplementation(() => (
|
||||
<div data-testid="skeleton-mock">Loading skeleton...</div>
|
||||
)),
|
||||
};
|
||||
});
|
||||
|
||||
describe('StatusCodeBarCharts', () => {
|
||||
// Default props for tests
|
||||
const mockFilters: IBuilderQuery['filters'] = { items: [], op: 'AND' };
|
||||
const mockTimeRange = {
|
||||
startTime: 1609459200000,
|
||||
endTime: 1609545600000,
|
||||
};
|
||||
const mockDomainName = 'test-domain';
|
||||
const mockEndPointName = '/api/test';
|
||||
const onDragSelectMock = jest.fn();
|
||||
const refetchFn = jest.fn();
|
||||
|
||||
// Mock formatted data
|
||||
const mockFormattedData = {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
values: [[1609459200, 10]],
|
||||
metric: { statusCode: '200-299' },
|
||||
queryName: 'A',
|
||||
},
|
||||
{
|
||||
values: [[1609459200, 5]],
|
||||
metric: { statusCode: '400-499' },
|
||||
queryName: 'B',
|
||||
},
|
||||
],
|
||||
resultType: 'matrix',
|
||||
},
|
||||
};
|
||||
|
||||
// Mock filter values
|
||||
const mockStatusCodeFilters = [
|
||||
{
|
||||
id: 'test-id-1',
|
||||
key: {
|
||||
dataType: 'string',
|
||||
id: 'response_status_code--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'response_status_code',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '>=',
|
||||
value: '200',
|
||||
},
|
||||
{
|
||||
id: 'test-id-2',
|
||||
key: {
|
||||
dataType: 'string',
|
||||
id: 'response_status_code--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'response_status_code',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '<=',
|
||||
value: '299',
|
||||
},
|
||||
];
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
(getFormattedEndPointStatusCodeChartData as jest.Mock).mockReturnValue(
|
||||
mockFormattedData,
|
||||
);
|
||||
(getStatusCodeBarChartWidgetData as jest.Mock).mockReturnValue({
|
||||
id: 'test-widget',
|
||||
title: 'Status Code',
|
||||
description: 'Shows status code distribution',
|
||||
query: { builder: { queryData: [] } },
|
||||
panelTypes: 'bar',
|
||||
});
|
||||
(getCustomFiltersForBarChart as jest.Mock).mockReturnValue(
|
||||
mockStatusCodeFilters,
|
||||
);
|
||||
});
|
||||
|
||||
it('renders loading state correctly', () => {
|
||||
// Arrange
|
||||
const mockStatusCodeQuery: MockQueryResult = {
|
||||
isLoading: true,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
const mockLatencyQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(
|
||||
<StatusCodeBarCharts
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('skeleton-mock')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders error state correctly', () => {
|
||||
// Arrange
|
||||
const mockStatusCodeQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: true,
|
||||
error: new Error('Test error'),
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
const mockLatencyQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(
|
||||
<StatusCodeBarCharts
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('error-state-mock')).toBeInTheDocument();
|
||||
expect(ErrorState).toHaveBeenCalledWith(
|
||||
{ refetch: expect.any(Function) },
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('renders chart data correctly when loaded', () => {
|
||||
// Arrange
|
||||
const mockData = {
|
||||
payload: mockFormattedData,
|
||||
} as SuccessResponse<any>;
|
||||
|
||||
const mockStatusCodeQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
const mockLatencyQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(
|
||||
<StatusCodeBarCharts
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(getFormattedEndPointStatusCodeChartData).toHaveBeenCalledWith(
|
||||
mockData.payload,
|
||||
'sum',
|
||||
);
|
||||
expect(screen.getByTestId('uplot-mock')).toBeInTheDocument();
|
||||
expect(screen.getByText('Number of calls')).toBeInTheDocument();
|
||||
expect(screen.getByText('Latency')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('switches between number of calls and latency views', () => {
|
||||
// Arrange
|
||||
const mockData = {
|
||||
payload: mockFormattedData,
|
||||
} as SuccessResponse<any>;
|
||||
|
||||
const mockStatusCodeQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
const mockLatencyQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(
|
||||
<StatusCodeBarCharts
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Initially should be showing number of calls (index 0)
|
||||
const latencyButton = screen.getByText('Latency');
|
||||
|
||||
// Click to switch to latency view
|
||||
fireEvent.click(latencyButton);
|
||||
|
||||
// Should now format with the latency data
|
||||
expect(getFormattedEndPointStatusCodeChartData).toHaveBeenCalledWith(
|
||||
mockData.payload,
|
||||
'average',
|
||||
);
|
||||
});
|
||||
|
||||
it('uses getCustomFiltersForBarChart when needed', () => {
|
||||
// Arrange
|
||||
const mockData = {
|
||||
payload: mockFormattedData,
|
||||
} as SuccessResponse<any>;
|
||||
|
||||
const mockStatusCodeQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
const mockLatencyQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(
|
||||
<StatusCodeBarCharts
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Assert
|
||||
// Initially getCustomFiltersForBarChart won't be called until a graph click event
|
||||
expect(getCustomFiltersForBarChart).not.toHaveBeenCalled();
|
||||
|
||||
// We can't easily test the graph click handler directly,
|
||||
// but we've confirmed the function is mocked and ready to be tested
|
||||
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
|
||||
mockDomainName,
|
||||
mockEndPointName,
|
||||
expect.objectContaining({
|
||||
items: [],
|
||||
op: 'AND',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('handles widget generation with current filters', () => {
|
||||
// Arrange
|
||||
const mockCustomFilters = {
|
||||
items: [
|
||||
{
|
||||
id: 'custom-filter',
|
||||
key: { key: 'test-key' },
|
||||
op: '=',
|
||||
value: 'test-value',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const mockData = {
|
||||
payload: mockFormattedData,
|
||||
} as SuccessResponse<any>;
|
||||
|
||||
const mockStatusCodeQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
const mockLatencyQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(
|
||||
<StatusCodeBarCharts
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockCustomFilters as IBuilderQuery['filters']}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Assert widget creation was called with the correct parameters
|
||||
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
|
||||
mockDomainName,
|
||||
mockEndPointName,
|
||||
expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({ id: 'custom-filter' }),
|
||||
]),
|
||||
op: 'AND',
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,175 @@
import '@testing-library/jest-dom';

import { render, screen } from '@testing-library/react';

import StatusCodeTable from '../Explorer/Domains/DomainDetails/components/StatusCodeTable';

// Mock the ErrorState component
jest.mock('../Explorer/Domains/DomainDetails/components/ErrorState', () =>
jest.fn().mockImplementation(({ refetch }) => (
<div
data-testid="error-state-mock"
onClick={refetch}
onKeyDown={(e: React.KeyboardEvent<HTMLDivElement>): void => {
if (e.key === 'Enter' || e.key === ' ') {
refetch();
}
}}
role="button"
tabIndex={0}
>
Error state
</div>
)),
);

// Mock antd components
jest.mock('antd', () => {
const originalModule = jest.requireActual('antd');
return {
...originalModule,
Table: jest
.fn()
.mockImplementation(({ loading, dataSource, columns, locale }) => (
<div data-testid="table-mock">
{loading && <div data-testid="loading-indicator">Loading...</div>}
{dataSource &&
dataSource.length === 0 &&
!loading &&
locale?.emptyText && (
<div data-testid="empty-table">{locale.emptyText}</div>
)}
{dataSource && dataSource.length > 0 && (
<div data-testid="table-data">
Data loaded with {dataSource.length} rows and {columns.length} columns
</div>
)}
</div>
)),
Typography: {
Text: jest.fn().mockImplementation(({ children, className }) => (
<div data-testid="typography-text" className={className}>
{children}
</div>
)),
},
};
});

// Create a mock query result type
interface MockQueryResult {
isLoading: boolean;
isRefetching: boolean;
isError: boolean;
error?: Error;
data?: any;
refetch: () => void;
}

describe('StatusCodeTable', () => {
const refetchFn = jest.fn();

it('renders loading state correctly', () => {
// Arrange
const mockQuery: MockQueryResult = {
isLoading: true,
isRefetching: false,
isError: false,
data: undefined,
refetch: refetchFn,
};

// Act
render(<StatusCodeTable endPointStatusCodeDataQuery={mockQuery as any} />);

// Assert
expect(screen.getByTestId('loading-indicator')).toBeInTheDocument();
});

it('renders error state correctly', () => {
// Arrange
const mockQuery: MockQueryResult = {
isLoading: false,
isRefetching: false,
isError: true,
error: new Error('Test error'),
data: undefined,
refetch: refetchFn,
};

// Act
render(<StatusCodeTable endPointStatusCodeDataQuery={mockQuery as any} />);

// Assert
expect(screen.getByTestId('error-state-mock')).toBeInTheDocument();
});

it('renders empty state when no data is available', () => {
// Arrange
const mockQuery: MockQueryResult = {
isLoading: false,
isRefetching: false,
isError: false,
data: {
payload: {
data: {
result: [
{
table: {
rows: [],
},
},
],
},
},
},
refetch: refetchFn,
};

// Act
render(<StatusCodeTable endPointStatusCodeDataQuery={mockQuery as any} />);

// Assert
expect(screen.getByTestId('empty-table')).toBeInTheDocument();
});

it('renders table data correctly when data is available', () => {
// Arrange
const mockData = [
{
data: {
response_status_code: '200',
A: '150', // count
B: '10000000', // latency in nanoseconds
C: '5', // rate
},
},
];

const mockQuery: MockQueryResult = {
isLoading: false,
isRefetching: false,
isError: false,
data: {
payload: {
data: {
result: [
{
table: {
rows: mockData,
},
},
],
},
},
},
refetch: refetchFn,
};

// Act
render(<StatusCodeTable endPointStatusCodeDataQuery={mockQuery as any} />);

// Assert
expect(screen.getByTestId('table-data')).toBeInTheDocument();
});
});
@@ -0,0 +1,374 @@
|
||||
import { fireEvent, render, screen, within } from '@testing-library/react';
|
||||
import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import {
|
||||
formatTopErrorsDataForTable,
|
||||
getEndPointDetailsQueryPayload,
|
||||
getTopErrorsColumnsConfig,
|
||||
getTopErrorsCoRelationQueryFilters,
|
||||
getTopErrorsQueryPayload,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import { useQueries } from 'react-query';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
import TopErrors from '../Explorer/Domains/DomainDetails/TopErrors';
|
||||
|
||||
// Mock the EndPointsDropDown component to avoid issues
|
||||
jest.mock(
|
||||
'../Explorer/Domains/DomainDetails/components/EndPointsDropDown',
|
||||
() => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockImplementation(
|
||||
({ setSelectedEndPointName }): JSX.Element => (
|
||||
<div data-testid="endpoints-dropdown-mock">
|
||||
<select
|
||||
data-testid="endpoints-select"
|
||||
onChange={(e): void => setSelectedEndPointName(e.target.value)}
|
||||
role="combobox"
|
||||
>
|
||||
<option value="/api/test">/api/test</option>
|
||||
<option value="/api/new-endpoint">/api/new-endpoint</option>
|
||||
</select>
|
||||
</div>
|
||||
),
|
||||
),
|
||||
}),
|
||||
);
|
||||
|
||||
// Mock dependencies
|
||||
jest.mock('react-query', () => ({
|
||||
useQueries: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('components/CeleryTask/useNavigateToExplorer', () => ({
|
||||
useNavigateToExplorer: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('container/ApiMonitoring/utils', () => ({
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY: ['key1', 'key2', 'key3', 'key4', 'key5'],
|
||||
formatTopErrorsDataForTable: jest.fn(),
|
||||
getEndPointDetailsQueryPayload: jest.fn(),
|
||||
getTopErrorsColumnsConfig: jest.fn(),
|
||||
getTopErrorsCoRelationQueryFilters: jest.fn(),
|
||||
getTopErrorsQueryPayload: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('TopErrors', () => {
|
||||
const mockProps = {
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
domainName: 'test-domain',
|
||||
timeRange: {
|
||||
startTime: 1000000000,
|
||||
endTime: 1000010000,
|
||||
},
|
||||
initialFilters: {
|
||||
items: [],
|
||||
op: 'AND',
|
||||
},
|
||||
};
|
||||
|
||||
// Setup basic mocks
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
|
||||
// Mock getTopErrorsColumnsConfig
|
||||
(getTopErrorsColumnsConfig as jest.Mock).mockReturnValue([
|
||||
{
|
||||
title: 'Endpoint',
|
||||
dataIndex: 'endpointName',
|
||||
key: 'endpointName',
|
||||
},
|
||||
{
|
||||
title: 'Status Code',
|
||||
dataIndex: 'statusCode',
|
||||
key: 'statusCode',
|
||||
},
|
||||
{
|
||||
title: 'Status Message',
|
||||
dataIndex: 'statusMessage',
|
||||
key: 'statusMessage',
|
||||
},
|
||||
{
|
||||
title: 'Count',
|
||||
dataIndex: 'count',
|
||||
key: 'count',
|
||||
},
|
||||
]);
|
||||
|
||||
// Mock useQueries
|
||||
(useQueries as jest.Mock).mockImplementation((queryConfigs) => {
|
||||
// For topErrorsDataQueries
|
||||
if (
|
||||
queryConfigs.length === 1 &&
|
||||
queryConfigs[0].queryKey &&
|
||||
queryConfigs[0].queryKey[0] === REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN
|
||||
) {
|
||||
return [
|
||||
{
|
||||
data: {
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
metric: {
|
||||
'http.url': '/api/test',
|
||||
status_code: '500',
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
status_message: 'Internal Server Error',
|
||||
},
|
||||
values: [[1000000100, '10']],
|
||||
queryName: 'A',
|
||||
legend: 'Test Legend',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
refetch: jest.fn(),
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
// For endPointDropDownDataQueries
|
||||
return [
|
||||
{
|
||||
data: {
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [
|
||||
{
|
||||
'http.url': '/api/test',
|
||||
A: 100,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
},
|
||||
];
|
||||
});
|
||||
|
||||
// Mock formatTopErrorsDataForTable
|
||||
(formatTopErrorsDataForTable as jest.Mock).mockReturnValue([
|
||||
{
|
||||
key: '1',
|
||||
endpointName: '/api/test',
|
||||
statusCode: '500',
|
||||
statusMessage: 'Internal Server Error',
|
||||
count: 10,
|
||||
},
|
||||
]);
|
||||
|
||||
// Mock getTopErrorsQueryPayload
|
||||
(getTopErrorsQueryPayload as jest.Mock).mockReturnValue([
|
||||
{
|
||||
queryName: 'TopErrorsQuery',
|
||||
start: mockProps.timeRange.startTime,
|
||||
end: mockProps.timeRange.endTime,
|
||||
step: 60,
|
||||
},
|
||||
]);
|
||||
|
||||
// Mock getEndPointDetailsQueryPayload
|
||||
(getEndPointDetailsQueryPayload as jest.Mock).mockReturnValue([
|
||||
{},
|
||||
{},
|
||||
{
|
||||
queryName: 'EndpointDropdownQuery',
|
||||
start: mockProps.timeRange.startTime,
|
||||
end: mockProps.timeRange.endTime,
|
||||
step: 60,
|
||||
},
|
||||
]);
|
||||
|
||||
// Mock useNavigateToExplorer
|
||||
(useNavigateToExplorer as jest.Mock).mockReturnValue(jest.fn());
|
||||
|
||||
// Mock getTopErrorsCoRelationQueryFilters
|
||||
(getTopErrorsCoRelationQueryFilters as jest.Mock).mockReturnValue({
|
||||
items: [
|
||||
{ id: 'test1', key: { key: 'domain' }, op: '=', value: 'test-domain' },
|
||||
{ id: 'test2', key: { key: 'endpoint' }, op: '=', value: '/api/test' },
|
||||
{ id: 'test3', key: { key: 'status' }, op: '=', value: '500' },
|
||||
],
|
||||
op: 'AND',
|
||||
});
|
||||
});
|
||||
|
||||
it('renders component correctly', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
const { container } = render(<TopErrors {...mockProps} />);
|
||||
|
||||
// Check if the title and toggle are rendered
|
||||
expect(screen.getByText('Errors with Status Message')).toBeInTheDocument();
|
||||
expect(screen.getByText('Status Message Exists')).toBeInTheDocument();
|
||||
|
||||
// Find the table row and verify content
|
||||
const tableBody = container.querySelector('.ant-table-tbody');
|
||||
expect(tableBody).not.toBeNull();
|
||||
|
||||
if (tableBody) {
|
||||
const row = within(tableBody as HTMLElement).getByRole('row');
|
||||
expect(within(row).getByText('/api/test')).toBeInTheDocument();
|
||||
expect(within(row).getByText('500')).toBeInTheDocument();
|
||||
expect(within(row).getByText('Internal Server Error')).toBeInTheDocument();
|
||||
}
|
||||
});
|
||||
|
||||
it('renders error state when isError is true', () => {
|
||||
// Mock useQueries to return isError: true
|
||||
(useQueries as jest.Mock).mockImplementationOnce(() => [
|
||||
{
|
||||
isError: true,
|
||||
refetch: jest.fn(),
|
||||
},
|
||||
]);
|
||||
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<TopErrors {...mockProps} />);
|
||||
|
||||
// Error state should be shown with the actual text displayed in the UI
|
||||
expect(
|
||||
screen.getByText('Uh-oh :/ We ran into an error.'),
|
||||
).toBeInTheDocument();
|
||||
expect(screen.getByText('Please refresh this panel.')).toBeInTheDocument();
|
||||
expect(screen.getByText('Refresh this panel')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('handles row click correctly', () => {
|
||||
const navigateMock = jest.fn();
|
||||
(useNavigateToExplorer as jest.Mock).mockReturnValue(navigateMock);
|
||||
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
const { container } = render(<TopErrors {...mockProps} />);
|
||||
|
||||
// Find and click on the table cell containing the endpoint
|
||||
const tableBody = container.querySelector('.ant-table-tbody');
|
||||
expect(tableBody).not.toBeNull();
|
||||
|
||||
if (tableBody) {
|
||||
const row = within(tableBody as HTMLElement).getByRole('row');
|
||||
const cellWithEndpoint = within(row).getByText('/api/test');
|
||||
fireEvent.click(cellWithEndpoint);
|
||||
}
|
||||
|
||||
// Check if navigateToExplorer was called with correct params
|
||||
expect(navigateMock).toHaveBeenCalledWith({
|
||||
filters: [
|
||||
{ id: 'test1', key: { key: 'domain' }, op: '=', value: 'test-domain' },
|
||||
{ id: 'test2', key: { key: 'endpoint' }, op: '=', value: '/api/test' },
|
||||
{ id: 'test3', key: { key: 'status' }, op: '=', value: '500' },
|
||||
],
|
||||
dataSource: DataSource.TRACES,
|
||||
startTime: mockProps.timeRange.startTime,
|
||||
endTime: mockProps.timeRange.endTime,
|
||||
shouldResolveQuery: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('updates endpoint filter when dropdown value changes', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<TopErrors {...mockProps} />);
|
||||
|
||||
// Find the dropdown
|
||||
const dropdown = screen.getByRole('combobox');
|
||||
|
||||
// Mock the change
|
||||
fireEvent.change(dropdown, { target: { value: '/api/new-endpoint' } });
|
||||
|
||||
// Check if getTopErrorsQueryPayload was called with updated parameters
|
||||
expect(getTopErrorsQueryPayload).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('handles status message toggle correctly', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<TopErrors {...mockProps} />);
|
||||
|
||||
// Find the toggle switch
|
||||
const toggle = screen.getByRole('switch');
|
||||
expect(toggle).toBeInTheDocument();
|
||||
|
||||
// Toggle should be on by default
|
||||
expect(toggle).toHaveAttribute('aria-checked', 'true');
|
||||
|
||||
// Click the toggle to turn it off
|
||||
fireEvent.click(toggle);
|
||||
|
||||
// Check if getTopErrorsQueryPayload was called with showStatusCodeErrors=false
|
||||
expect(getTopErrorsQueryPayload).toHaveBeenCalledWith(
|
||||
mockProps.domainName,
|
||||
mockProps.timeRange.startTime,
|
||||
mockProps.timeRange.endTime,
|
||||
expect.any(Object),
|
||||
false,
|
||||
);
|
||||
|
||||
// Title should change
|
||||
expect(screen.getByText('All Errors')).toBeInTheDocument();
|
||||
|
||||
// Click the toggle to turn it back on
|
||||
fireEvent.click(toggle);
|
||||
|
||||
// Check if getTopErrorsQueryPayload was called with showStatusCodeErrors=true
|
||||
expect(getTopErrorsQueryPayload).toHaveBeenCalledWith(
|
||||
mockProps.domainName,
|
||||
mockProps.timeRange.startTime,
|
||||
mockProps.timeRange.endTime,
|
||||
expect.any(Object),
|
||||
true,
|
||||
);
|
||||
|
||||
// Title should change back
|
||||
expect(screen.getByText('Errors with Status Message')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('includes toggle state in query key for cache busting', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<TopErrors {...mockProps} />);
|
||||
|
||||
const toggle = screen.getByRole('switch');
|
||||
|
||||
// Initial query should include showStatusCodeErrors=true
|
||||
expect(useQueries).toHaveBeenCalledWith(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
queryKey: expect.arrayContaining([
|
||||
REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
|
||||
expect.any(Object),
|
||||
expect.any(String),
|
||||
true,
|
||||
]),
|
||||
}),
|
||||
]),
|
||||
);
|
||||
|
||||
// Click toggle
|
||||
fireEvent.click(toggle);
|
||||
|
||||
// Query should be called with showStatusCodeErrors=false in key
|
||||
expect(useQueries).toHaveBeenCalledWith(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
queryKey: expect.arrayContaining([
|
||||
REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
|
||||
expect.any(Object),
|
||||
expect.any(String),
|
||||
false,
|
||||
]),
|
||||
}),
|
||||
]),
|
||||
);
|
||||
});
|
||||
});
|
||||
File diff suppressed because it is too large
@@ -56,6 +56,7 @@ function WidgetGraphComponent({
|
||||
onOpenTraceBtnClick,
|
||||
customSeries,
|
||||
customErrorMessage,
|
||||
customOnRowClick,
|
||||
}: WidgetGraphComponentProps): JSX.Element {
|
||||
const { safeNavigate } = useSafeNavigate();
|
||||
const [deleteModal, setDeleteModal] = useState(false);
|
||||
@@ -380,6 +381,7 @@ function WidgetGraphComponent({
|
||||
openTracesButton={openTracesButton}
|
||||
onOpenTraceBtnClick={onOpenTraceBtnClick}
|
||||
customSeries={customSeries}
|
||||
customOnRowClick={customOnRowClick}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -47,6 +47,8 @@ function GridCardGraph({
|
||||
start,
|
||||
end,
|
||||
analyticsEvent,
|
||||
customTimeRange,
|
||||
customOnRowClick,
|
||||
}: GridCardGraphProps): JSX.Element {
|
||||
const dispatch = useDispatch();
|
||||
const [errorMessage, setErrorMessage] = useState<string>();
|
||||
@@ -130,6 +132,8 @@ function GridCardGraph({
|
||||
variables: getDashboardVariables(variables),
|
||||
fillGaps: widget.fillSpans,
|
||||
formatForWeb: widget.panelTypes === PANEL_TYPES.TABLE,
|
||||
start: customTimeRange?.startTime || start,
|
||||
end: customTimeRange?.endTime || end,
|
||||
};
|
||||
}
|
||||
updatedQuery.builder.queryData[0].pageSize = 10;
|
||||
@@ -149,6 +153,8 @@ function GridCardGraph({
|
||||
initialDataSource === DataSource.TRACES && widget.selectedTracesFields,
|
||||
},
|
||||
fillGaps: widget.fillSpans,
|
||||
start: customTimeRange?.startTime || start,
|
||||
end: customTimeRange?.endTime || end,
|
||||
};
|
||||
});
|
||||
|
||||
@@ -187,8 +193,8 @@ function GridCardGraph({
|
||||
variables: getDashboardVariables(variables),
|
||||
selectedTime: widget.timePreferance || 'GLOBAL_TIME',
|
||||
globalSelectedInterval,
|
||||
start,
|
||||
end,
|
||||
start: customTimeRange?.startTime || start,
|
||||
end: customTimeRange?.endTime || end,
|
||||
},
|
||||
version || DEFAULT_ENTITY_VERSION,
|
||||
{
|
||||
@@ -202,6 +208,9 @@ function GridCardGraph({
|
||||
widget.timePreferance,
|
||||
widget.fillSpans,
|
||||
requestData,
|
||||
...(customTimeRange && customTimeRange.startTime && customTimeRange.endTime
|
||||
? [customTimeRange.startTime, customTimeRange.endTime]
|
||||
: []),
|
||||
],
|
||||
retry(failureCount, error): boolean {
|
||||
if (
|
||||
@@ -279,6 +288,7 @@ function GridCardGraph({
|
||||
onOpenTraceBtnClick={onOpenTraceBtnClick}
|
||||
customSeries={customSeries}
|
||||
customErrorMessage={isInternalServerError ? customErrorMessage : undefined}
|
||||
customOnRowClick={customOnRowClick}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -39,6 +39,7 @@ export interface WidgetGraphComponentProps {
|
||||
onOpenTraceBtnClick?: (record: RowData) => void;
|
||||
customSeries?: (data: QueryData[]) => uPlot.Series[];
|
||||
customErrorMessage?: string;
|
||||
customOnRowClick?: (record: RowData) => void;
|
||||
}
|
||||
|
||||
export interface GridCardGraphProps {
|
||||
@@ -61,6 +62,11 @@ export interface GridCardGraphProps {
|
||||
start?: number;
|
||||
end?: number;
|
||||
analyticsEvent?: string;
|
||||
customTimeRange?: {
|
||||
startTime: number;
|
||||
endTime: number;
|
||||
};
|
||||
customOnRowClick?: (record: RowData) => void;
|
||||
}
|
||||
|
||||
export interface GetGraphVisibilityStateOnLegendClickProps {
|
||||
|
||||
@@ -43,6 +43,7 @@ function GridTableComponent({
|
||||
sticky,
|
||||
openTracesButton,
|
||||
onOpenTraceBtnClick,
|
||||
customOnRowClick,
|
||||
widgetId,
|
||||
...props
|
||||
}: GridTableComponentProps): JSX.Element {
|
||||
@@ -214,6 +215,30 @@ function GridTableComponent({
|
||||
[newColumnData],
|
||||
);
|
||||
|
||||
const newColumnsWithRenderColumnCell = useMemo(
|
||||
() =>
|
||||
newColumnData.map((column) => ({
|
||||
...column,
|
||||
...('dataIndex' in column &&
|
||||
props.renderColumnCell?.[column.dataIndex as string]
|
||||
? { render: props.renderColumnCell[column.dataIndex as string] }
|
||||
: {}),
|
||||
})),
|
||||
[newColumnData, props.renderColumnCell],
|
||||
);
|
||||
|
||||
const newColumnsWithCustomColTitles = useMemo(
|
||||
() =>
|
||||
newColumnsWithRenderColumnCell.map((column) => ({
|
||||
...column,
|
||||
...('dataIndex' in column &&
|
||||
props.customColTitles?.[column.dataIndex as string]
|
||||
? { title: props.customColTitles[column.dataIndex as string] }
|
||||
: {}),
|
||||
})),
|
||||
[newColumnsWithRenderColumnCell, props.customColTitles],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
eventEmitter.emit(Events.TABLE_COLUMNS_DATA, {
|
||||
columns: newColumnData,
|
||||
@@ -227,15 +252,22 @@ function GridTableComponent({
|
||||
query={query}
|
||||
queryTableData={data}
|
||||
loading={false}
|
||||
columns={openTracesButton ? columnDataWithOpenTracesButton : newColumnData}
|
||||
columns={
|
||||
openTracesButton
|
||||
? columnDataWithOpenTracesButton
|
||||
: newColumnsWithCustomColTitles
|
||||
}
|
||||
dataSource={dataSource}
|
||||
sticky={sticky}
|
||||
widgetId={widgetId}
|
||||
onRow={
|
||||
openTracesButton
|
||||
openTracesButton || customOnRowClick
|
||||
? (record): React.HTMLAttributes<HTMLElement> => ({
|
||||
onClick: (): void => {
|
||||
onOpenTraceBtnClick?.(record);
|
||||
if (openTracesButton) {
|
||||
onOpenTraceBtnClick?.(record);
|
||||
}
|
||||
customOnRowClick?.(record);
|
||||
},
|
||||
})
|
||||
: undefined
|
||||
|
||||
@@ -4,6 +4,7 @@ import {
|
||||
ThresholdOperators,
|
||||
ThresholdProps,
|
||||
} from 'container/NewWidget/RightContainer/Threshold/types';
|
||||
import { QueryTableProps } from 'container/QueryTable/QueryTable.intefaces';
|
||||
import { RowData } from 'lib/query/createTableColumnsFromQuery';
|
||||
import { ColumnUnit } from 'types/api/dashboard/getAll';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
@@ -17,7 +18,10 @@ export type GridTableComponentProps = {
|
||||
searchTerm?: string;
|
||||
openTracesButton?: boolean;
|
||||
onOpenTraceBtnClick?: (record: RowData) => void;
|
||||
customOnRowClick?: (record: RowData) => void;
|
||||
widgetId?: string;
|
||||
renderColumnCell?: QueryTableProps['renderColumnCell'];
|
||||
customColTitles?: Record<string, string>;
|
||||
} & Pick<LogsExplorerTableProps, 'data'> &
|
||||
Omit<TableProps<RowData>, 'columns' | 'dataSource'>;
|
||||
|
||||
|
||||
@@ -451,6 +451,9 @@ function OnboardingAddDataSource(): JSX.Element {
|
||||
case 'integrations':
|
||||
history.push(ROUTES.INTEGRATIONS);
|
||||
break;
|
||||
case 'home':
|
||||
history.push(ROUTES.HOME);
|
||||
break;
|
||||
default:
|
||||
history.push(ROUTES.APPLICATION);
|
||||
}
|
||||
|
||||
@@ -12,6 +12,34 @@
|
||||
"imgUrl": "/Logos/quickstart.svg",
|
||||
"link": "https://signoz.io/docs/cloud/quickstart/"
|
||||
},
|
||||
{
|
||||
"dataSource": "migrate-from-datadog",
|
||||
"label": "From Datadog",
|
||||
"tags": ["migrate to SigNoz"],
|
||||
"module": "home",
|
||||
"relatedSearchKeywords": [
|
||||
"datadog",
|
||||
"opentelemetry"
|
||||
],
|
||||
"imgUrl": "/Logos/datadog.svg",
|
||||
"link": "https://signoz.io/docs/migration/migrate-from-datadog/"
|
||||
},
|
||||
{
|
||||
"dataSource": "migrate-from-lgtm",
|
||||
"label": "From Grafana",
|
||||
"tags": ["migrate to SigNoz"],
|
||||
"module": "home",
|
||||
"relatedSearchKeywords": [
|
||||
"grafana",
|
||||
"loki",
|
||||
"mirmir",
|
||||
"tempo",
|
||||
"alloy",
|
||||
"opentelemetry"
|
||||
],
|
||||
"imgUrl": "/Logos/grafana.svg",
|
||||
"link": "https://signoz.io/docs/migration/migrate-from-grafana/"
|
||||
},
|
||||
{
|
||||
"dataSource": "java",
|
||||
"entityID": "dataSource",
|
||||
@@ -1109,6 +1137,21 @@
|
||||
"id": "opentelemetry-cpp",
|
||||
"link": "https://signoz.io/docs/instrumentation/opentelemetry-cpp/"
|
||||
},
|
||||
{
|
||||
"dataSource": "nginx-tracing",
|
||||
"label": "Nginx - Tracing",
|
||||
"imgUrl": "/Logos/nginx.svg",
|
||||
"tags": ["apm"],
|
||||
"module": "apm",
|
||||
"relatedSearchKeywords": [
|
||||
"tracing",
|
||||
"nginx server",
|
||||
"nginx proxy",
|
||||
"nginx"
|
||||
],
|
||||
"id": "nginx-tracing",
|
||||
"link": "https://signoz.io/docs/instrumentation/opentelemetry-nginx/"
|
||||
},
|
||||
{
|
||||
"dataSource": "kubernetes-pod-logs",
|
||||
"label": "Kubernetes Pod Logs",
|
||||
@@ -2874,5 +2917,43 @@
|
||||
"imgUrl": "/Logos/rds.svg",
|
||||
"link": "/integrations?integration=aws-integration&service=rds",
|
||||
"internalRedirect": true
|
||||
},
|
||||
{
|
||||
"dataSource": "temporal",
|
||||
"label": "Temporal",
|
||||
"imgUrl": "/Logos/temporal.svg",
|
||||
"tags": ["integrations"],
|
||||
"module": "apm",
|
||||
"relatedSearchKeywords": [
|
||||
"temporal metrics",
|
||||
"temporal traces",
|
||||
"temporal logs",
|
||||
"temporal cloud",
|
||||
"temporal"
|
||||
],
|
||||
"question": {
|
||||
"desc": "What are you using ?",
|
||||
"type": "select",
|
||||
"options": [
|
||||
{
|
||||
"key": "temporal-cloud",
|
||||
"label": "Cloud Metrics",
|
||||
"imgUrl": "/Logos/temporal.svg",
|
||||
"link": "https://signoz.io/docs/integrations/temporal-cloud-metrics/"
|
||||
},
|
||||
{
|
||||
"key": "temporal-golang",
|
||||
"label": "Go",
|
||||
"imgUrl": "/Logos/go.svg",
|
||||
"link": "https://signoz.io/docs/integrations/temporal-golang-opentelemetry/"
|
||||
},
|
||||
{
|
||||
"key": "temporal-typescript",
|
||||
"label": "TypeScript",
|
||||
"imgUrl": "/Logos/javascript.svg",
|
||||
"link": "https://signoz.io/docs/integrations/temporal-typescript-opentelemetry/"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -20,6 +20,7 @@ function PanelWrapper({
|
||||
openTracesButton,
|
||||
onOpenTraceBtnClick,
|
||||
customSeries,
|
||||
customOnRowClick,
|
||||
}: PanelWrapperProps): JSX.Element {
|
||||
const Component = PanelTypeVsPanelWrapper[
|
||||
selectedGraph || widget.panelTypes
|
||||
@@ -46,6 +47,7 @@ function PanelWrapper({
|
||||
searchTerm={searchTerm}
|
||||
openTracesButton={openTracesButton}
|
||||
onOpenTraceBtnClick={onOpenTraceBtnClick}
|
||||
customOnRowClick={customOnRowClick}
|
||||
customSeries={customSeries}
|
||||
/>
|
||||
);
|
||||
|
||||
@@ -11,6 +11,7 @@ function TablePanelWrapper({
|
||||
searchTerm,
|
||||
openTracesButton,
|
||||
onOpenTraceBtnClick,
|
||||
customOnRowClick,
|
||||
}: PanelWrapperProps): JSX.Element {
|
||||
const panelData =
|
||||
(queryResponse.data?.payload?.data?.result?.[0] as any)?.table || [];
|
||||
@@ -26,7 +27,10 @@ function TablePanelWrapper({
|
||||
searchTerm={searchTerm}
|
||||
openTracesButton={openTracesButton}
|
||||
onOpenTraceBtnClick={onOpenTraceBtnClick}
|
||||
customOnRowClick={customOnRowClick}
|
||||
widgetId={widget.id}
|
||||
renderColumnCell={widget.renderColumnCell}
|
||||
customColTitles={widget.customColTitles}
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
{...GRID_TABLE_CONFIG}
|
||||
/>
|
||||
|
||||
@@ -28,6 +28,7 @@ export type PanelWrapperProps = {
|
||||
customTooltipElement?: HTMLDivElement;
|
||||
openTracesButton?: boolean;
|
||||
onOpenTraceBtnClick?: (record: RowData) => void;
|
||||
customOnRowClick?: (record: RowData) => void;
|
||||
customSeries?: (data: QueryData[]) => uPlot.Series[];
|
||||
};
|
||||
|
||||
|
||||
@@ -24,6 +24,11 @@ export default function Toolbar({
const isLogsExplorerPage = useMemo(() => pathname === ROUTES.LOGS_EXPLORER, [
pathname,
]);

const isApiMonitoringPage = useMemo(() => pathname === ROUTES.API_MONITORING, [
pathname,
]);

return (
<div className="toolbar">
<div className="leftActions">{leftActions}</div>
@@ -31,7 +36,7 @@ export default function Toolbar({
{showOldCTA && <NewExplorerCTA />}
<DateTimeSelectionV2
showAutoRefresh={showAutoRefresh}
showRefreshText={!isLogsExplorerPage}
showRefreshText={!isLogsExplorerPage && !isApiMonitoringPage}
/>
</div>
<div className="rightActions">{rightActions}</div>
@@ -75,6 +75,8 @@ function DateTimeSelection({
|
||||
isModalTimeSelection = false,
|
||||
onTimeChange,
|
||||
modalSelectedInterval,
|
||||
modalInitialStartTime,
|
||||
modalInitialEndTime,
|
||||
}: Props): JSX.Element {
|
||||
const [formSelector] = Form.useForm();
|
||||
const { safeNavigate } = useSafeNavigate();
|
||||
@@ -94,6 +96,36 @@ function DateTimeSelection({
|
||||
const [, handleCopyToClipboard] = useCopyToClipboard();
|
||||
const [isURLCopied, setIsURLCopied] = useState(false);
|
||||
|
||||
// Prioritize props for initial modal time, fallback to URL params
|
||||
let initialModalStartTime = 0;
|
||||
if (modalInitialStartTime !== undefined) {
|
||||
initialModalStartTime = modalInitialStartTime;
|
||||
} else if (searchStartTime) {
|
||||
initialModalStartTime = parseInt(searchStartTime, 10);
|
||||
}
|
||||
|
||||
let initialModalEndTime = 0;
|
||||
if (modalInitialEndTime !== undefined) {
|
||||
initialModalEndTime = modalInitialEndTime;
|
||||
} else if (searchEndTime) {
|
||||
initialModalEndTime = parseInt(searchEndTime, 10);
|
||||
}
|
||||
|
||||
const [modalStartTime, setModalStartTime] = useState<number>(
|
||||
initialModalStartTime,
|
||||
);
|
||||
const [modalEndTime, setModalEndTime] = useState<number>(initialModalEndTime);
|
||||
|
||||
// Effect to update modal time state when props change
|
||||
useEffect(() => {
|
||||
if (modalInitialStartTime !== undefined) {
|
||||
setModalStartTime(modalInitialStartTime);
|
||||
}
|
||||
if (modalInitialEndTime !== undefined) {
|
||||
setModalEndTime(modalInitialEndTime);
|
||||
}
|
||||
}, [modalInitialStartTime, modalInitialEndTime]);
|
||||
|
||||
const {
|
||||
localstorageStartTime,
|
||||
localstorageEndTime,
|
||||
@@ -212,7 +244,6 @@ function DateTimeSelection({
|
||||
|
||||
const startString = startTime.format(format);
|
||||
const endString = endTime.format(format);
|
||||
|
||||
return `${startString} - ${endString}`;
|
||||
}
|
||||
return timeInterval;
|
||||
@@ -383,13 +414,6 @@ function DateTimeSelection({
|
||||
}
|
||||
}, [defaultRelativeTime, onSelectHandler]);
|
||||
|
||||
const [modalStartTime, setModalStartTime] = useState<number>(
|
||||
searchStartTime ? parseInt(searchStartTime, 10) : 0,
|
||||
);
|
||||
const [modalEndTime, setModalEndTime] = useState<number>(
|
||||
searchEndTime ? parseInt(searchEndTime, 10) : 0,
|
||||
);
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
const onCustomDateHandler = (dateTimeRange: DateTimeRangeType): void => {
|
||||
if (dateTimeRange !== null) {
|
||||
@@ -864,6 +888,8 @@ interface DateTimeSelectionV2Props {
|
||||
dateTimeRange?: [number, number],
|
||||
) => void;
|
||||
modalSelectedInterval?: Time;
|
||||
modalInitialStartTime?: number;
|
||||
modalInitialEndTime?: number;
|
||||
}
|
||||
|
||||
DateTimeSelection.defaultProps = {
|
||||
@@ -875,6 +901,8 @@ DateTimeSelection.defaultProps = {
|
||||
isModalTimeSelection: false,
|
||||
onTimeChange: (): void => {},
|
||||
modalSelectedInterval: RelativeTimeMap['5m'] as Time,
|
||||
modalInitialStartTime: undefined,
|
||||
modalInitialEndTime: undefined,
|
||||
};
|
||||
interface DispatchProps {
|
||||
updateTimeInterval: (
|
||||
|
||||
@@ -59,6 +59,7 @@ export interface GetUPlotChartOptions {
|
||||
timezone?: string;
|
||||
customSeries?: (data: QueryData[]) => uPlot.Series[];
|
||||
isLogScale?: boolean;
|
||||
colorMapping?: Record<string, string>;
|
||||
}
|
||||
|
||||
/** the function converts series A , series B , series C to
|
||||
@@ -166,6 +167,7 @@ export const getUPlotChartOptions = ({
|
||||
timezone,
|
||||
customSeries,
|
||||
isLogScale,
|
||||
colorMapping,
|
||||
}: GetUPlotChartOptions): uPlot.Options => {
|
||||
const timeScaleProps = getXAxisScale(minTimeScale, maxTimeScale);
|
||||
|
||||
@@ -229,10 +231,11 @@ export const getUPlotChartOptions = ({
|
||||
tooltipPlugin({
|
||||
apiResponse,
|
||||
yAxisUnit,
|
||||
stackBarChart,
|
||||
isDarkMode,
|
||||
customTooltipElement,
|
||||
stackBarChart,
|
||||
timezone,
|
||||
colorMapping,
|
||||
customTooltipElement,
|
||||
}),
|
||||
onClickPlugin({
|
||||
onClick: onClickHandler,
|
||||
|
||||
@@ -48,6 +48,7 @@ const generateTooltipContent = (
|
||||
isMergedSeries?: boolean,
|
||||
stackBarChart?: boolean,
|
||||
timezone?: string,
|
||||
colorMapping?: Record<string, string>,
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
): HTMLElement => {
|
||||
const container = document.createElement('div');
|
||||
@@ -95,10 +96,12 @@ const generateTooltipContent = (
|
||||
? ''
|
||||
: getLabelName(metric, queryName || '', legend || '');
|
||||
|
||||
let color = generateColor(
|
||||
label,
|
||||
isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor,
|
||||
);
|
||||
let color =
|
||||
colorMapping?.[label] ||
|
||||
generateColor(
|
||||
label,
|
||||
isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor,
|
||||
);
|
||||
|
||||
// in case of billing graph pick colors from the series options
|
||||
if (isBillingUsageGraphs) {
|
||||
@@ -230,6 +233,7 @@ type ToolTipPluginProps = {
|
||||
isDarkMode: boolean;
|
||||
customTooltipElement?: HTMLDivElement;
|
||||
timezone?: string;
|
||||
colorMapping?: Record<string, string>;
|
||||
};
|
||||
|
||||
const tooltipPlugin = ({
|
||||
@@ -242,6 +246,7 @@ const tooltipPlugin = ({
|
||||
isDarkMode,
|
||||
customTooltipElement,
|
||||
timezone,
|
||||
colorMapping,
|
||||
}: // eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
ToolTipPluginProps): any => {
|
||||
let over: HTMLElement;
|
||||
@@ -309,6 +314,7 @@ ToolTipPluginProps): any => {
|
||||
isMergedSeries,
|
||||
stackBarChart,
|
||||
timezone,
|
||||
colorMapping,
|
||||
);
|
||||
if (customTooltipElement) {
|
||||
content.appendChild(customTooltipElement);
|
||||
|
||||
frontend/src/pages/ApiMonitoring/ApiMonitoringPage.test.tsx (new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
|
||||
import ApiMonitoringPage from './ApiMonitoringPage';
|
||||
|
||||
// Mock the child component to isolate the ApiMonitoringPage logic
|
||||
// We are not testing ExplorerPage here, just that ApiMonitoringPage renders it via RouteTab.
|
||||
jest.mock('container/ApiMonitoring/Explorer/Explorer', () => ({
|
||||
__esModule: true,
|
||||
default: (): JSX.Element => <div>Mocked Explorer Page</div>,
|
||||
}));
|
||||
|
||||
// Mock the RouteTab component
|
||||
jest.mock('components/RouteTab', () => ({
|
||||
__esModule: true,
|
||||
default: ({
|
||||
routes,
|
||||
activeKey,
|
||||
}: {
|
||||
routes: any[];
|
||||
activeKey: string;
|
||||
}): JSX.Element => (
|
||||
<div data-testid="route-tab">
|
||||
<span>Active Key: {activeKey}</span>
|
||||
{/* Render the component defined in the route for the activeKey */}
|
||||
{routes.find((route) => route.key === activeKey)?.Component()}
|
||||
</div>
|
||||
),
|
||||
}));
|
||||
|
||||
// Mock useLocation hook to properly return the path we're testing
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useLocation: (): { pathname: string } => ({
|
||||
pathname: '/api-monitoring/explorer',
|
||||
}),
|
||||
}));
|
||||
|
||||
describe('ApiMonitoringPage', () => {
|
||||
it('should render the RouteTab with the Explorer tab', () => {
|
||||
render(
|
||||
<MemoryRouter initialEntries={['/api-monitoring/explorer']}>
|
||||
<ApiMonitoringPage />
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
// Check if the mock RouteTab is rendered
|
||||
expect(screen.getByTestId('route-tab')).toBeInTheDocument();
|
||||
|
||||
// Instead of checking for the mock component, just verify the RouteTab is there
|
||||
// and has the correct active key
|
||||
expect(screen.getByText(/Active Key:/)).toBeInTheDocument();
|
||||
|
||||
// We can't test for the Explorer page being rendered right now
|
||||
// but we'll verify the structure exists
|
||||
});
|
||||
|
||||
// Add more tests here later, e.g., testing navigation if more tabs were added
|
||||
});
|
||||
@@ -3,7 +3,10 @@ import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { GetWidgetQueryBuilderProps } from 'container/MetricsApplication/types';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import {
|
||||
IBuilderFormula,
|
||||
IBuilderQuery,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
@@ -12,6 +15,7 @@ interface GetWidgetQueryProps {
|
||||
title: string;
|
||||
description: string;
|
||||
queryData: IBuilderQuery[];
|
||||
queryFormulas?: IBuilderFormula[];
|
||||
panelTypes?: PANEL_TYPES;
|
||||
yAxisUnit?: string;
|
||||
columnUnits?: Record<string, string>;
|
||||
@@ -67,7 +71,7 @@ export function getWidgetQuery(
|
||||
promql: [],
|
||||
builder: {
|
||||
queryData: props.queryData,
|
||||
queryFormulas: [],
|
||||
queryFormulas: (props.queryFormulas as IBuilderFormula[]) || [],
|
||||
},
|
||||
clickhouse_sql: [],
|
||||
id: uuid(),
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
|
||||
import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems';
|
||||
import { QueryTableProps } from 'container/QueryTable/QueryTable.intefaces';
|
||||
import { ReactNode } from 'react';
|
||||
import { Layout } from 'react-grid-layout';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
@@ -113,6 +114,8 @@ export interface IBaseWidget {
|
||||
}
|
||||
export interface Widgets extends IBaseWidget {
|
||||
query: Query;
|
||||
renderColumnCell?: QueryTableProps['renderColumnCell'];
|
||||
customColTitles?: Record<string, string>;
|
||||
}
|
||||
|
||||
export interface PromQLWidgets extends IBaseWidget {
|
||||
|
||||
pkg/cache/cache.go (vendored, 19 changed lines)
@@ -6,6 +6,8 @@ import (
"fmt"
"reflect"
"time"

v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
)

// cacheable entity
@@ -61,11 +63,14 @@ func (s RetrieveStatus) String() string {

// cache interface
type Cache interface {
Connect(ctx context.Context) error
Store(ctx context.Context, cacheKey string, data CacheableEntity, ttl time.Duration) error
Retrieve(ctx context.Context, cacheKey string, dest CacheableEntity, allowExpired bool) (RetrieveStatus, error)
SetTTL(ctx context.Context, cacheKey string, ttl time.Duration)
Remove(ctx context.Context, cacheKey string)
BulkRemove(ctx context.Context, cacheKeys []string)
Close(ctx context.Context) error
Set(ctx context.Context, orgID string, cacheKey string, data CacheableEntity, ttl time.Duration) error
Get(ctx context.Context, orgID string, cacheKey string, dest CacheableEntity, allowExpired bool) (RetrieveStatus, error)
Delete(ctx context.Context, orgID string, cacheKey string)
DeleteMany(ctx context.Context, orgID string, cacheKeys []string)
}

type KeyGenerator interface {
// GenerateKeys generates the cache keys for the given query range params
// The keys are returned as a map where the key is the query name and the value is the cache key
GenerateKeys(*v3.QueryRangeParamsV3) map[string]string
}
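The hunk above replaces the key-only cache methods with org-scoped variants. A minimal usage sketch follows, assuming a concrete `cache.Cache` provider has already been constructed, that `entry` implements `cache.CacheableEntity`, and that the package import path follows the module path used elsewhere in the diff; the helper name and cache key are illustrative, not part of the change:

```go
package example

import (
	"context"
	"time"

	"github.com/SigNoz/signoz/pkg/cache"
)

// storeAndLoad is a hypothetical helper: it writes an org-scoped entry and
// reads it back, mirroring the Set/Get signatures in the interface above.
func storeAndLoad(ctx context.Context, c cache.Cache, orgID string, entry cache.CacheableEntity) error {
	// Every call now carries the org ID; providers namespace keys per org.
	if err := c.Set(ctx, orgID, "trace-waterfall-demo", entry, 5*time.Minute); err != nil {
		return err
	}

	status, err := c.Get(ctx, orgID, "trace-waterfall-demo", entry, false)
	if err != nil {
		return err
	}
	if status != cache.RetrieveStatusHit {
		// Key miss (or expired entry): callers would recompute and re-store here.
		return nil
	}

	// Invalidation is org-scoped as well.
	c.Delete(ctx, orgID, "trace-waterfall-demo")
	return nil
}
```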
pkg/cache/memorycache/provider.go (vendored, 40 changed lines)
@@ -23,25 +23,20 @@ func New(ctx context.Context, settings factory.ProviderSettings, config cache.Co
return &provider{cc: go_cache.New(config.Memory.TTL, config.Memory.CleanupInterval)}, nil
}

// Connect does nothing
func (c *provider) Connect(_ context.Context) error {
return nil
}

// Store stores the data in the cache
func (c *provider) Store(_ context.Context, cacheKey string, data cache.CacheableEntity, ttl time.Duration) error {
func (c *provider) Set(_ context.Context, orgID string, cacheKey string, data cache.CacheableEntity, ttl time.Duration) error {
// check if the data being passed is a pointer and is not nil
rv := reflect.ValueOf(data)
if rv.Kind() != reflect.Pointer || rv.IsNil() {
return cache.WrapCacheableEntityErrors(reflect.TypeOf(data), "inmemory")
}

c.cc.Set(cacheKey, data, ttl)
c.cc.Set(fmt.Sprintf("%s::%s", orgID, cacheKey), data, ttl)
return nil
}

// Retrieve retrieves the data from the cache
func (c *provider) Retrieve(_ context.Context, cacheKey string, dest cache.CacheableEntity, allowExpired bool) (cache.RetrieveStatus, error) {
func (c *provider) Get(_ context.Context, orgID string, cacheKey string, dest cache.CacheableEntity, allowExpired bool) (cache.RetrieveStatus, error) {
// check if the destination being passed is a pointer and is not nil
dstv := reflect.ValueOf(dest)
if dstv.Kind() != reflect.Pointer || dstv.IsNil() {
@@ -53,7 +48,7 @@ func (c *provider) Retrieve(_ context.Context, cacheKey string, dest cache.Cache
return cache.RetrieveStatusError, fmt.Errorf("destination value is not settable, %s", dstv.Elem())
}

data, found := c.cc.Get(cacheKey)
data, found := c.cc.Get(fmt.Sprintf("%s::%s", orgID, cacheKey))
if !found {
return cache.RetrieveStatusKeyMiss, nil
}
@@ -69,33 +64,14 @@ func (c *provider) Retrieve(_ context.Context, cacheKey string, dest cache.Cache
return cache.RetrieveStatusHit, nil
}

// SetTTL sets the TTL for the cache entry
func (c *provider) SetTTL(_ context.Context, cacheKey string, ttl time.Duration) {
item, found := c.cc.Get(cacheKey)
if !found {
return
}
_ = c.cc.Replace(cacheKey, item, ttl)
}

// Remove removes the cache entry
func (c *provider) Remove(_ context.Context, cacheKey string) {
c.cc.Delete(cacheKey)
func (c *provider) Delete(_ context.Context, orgID string, cacheKey string) {
c.cc.Delete(fmt.Sprintf("%s::%s", orgID, cacheKey))
}

// BulkRemove removes the cache entries
func (c *provider) BulkRemove(_ context.Context, cacheKeys []string) {
func (c *provider) DeleteMany(_ context.Context, orgID string, cacheKeys []string) {
for _, cacheKey := range cacheKeys {
c.cc.Delete(cacheKey)
c.cc.Delete(fmt.Sprintf("%s::%s", orgID, cacheKey))
}
}

// Close does nothing
func (c *provider) Close(_ context.Context) error {
return nil
}

// Configuration returns the cache configuration
func (c *provider) Configuration() *cache.Memory {
return nil
}
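The in-memory provider above builds its physical key by joining the org ID and the logical key with `::`, and the Redis provider in the next file does the same. A small sketch of that layout; the helper name is illustrative, the providers inline the `fmt.Sprintf` call instead:

```go
package example

import "fmt"

// orgScopedKey mirrors the fmt.Sprintf("%s::%s", orgID, cacheKey) pattern
// used by the providers, so entries for different orgs never collide even
// when they share the same logical cache key.
func orgScopedKey(orgID, cacheKey string) string {
	return fmt.Sprintf("%s::%s", orgID, cacheKey)
}
```

For example, `orgScopedKey("org-123", "getFlamegraphSpansForTrace-abc")` yields `"org-123::getFlamegraphSpansForTrace-abc"`.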
pkg/cache/rediscache/provider.go (vendored, 85 changed lines)
@@ -14,7 +14,6 @@ import (
|
||||
|
||||
type provider struct {
|
||||
client *redis.Client
|
||||
opts cache.Redis
|
||||
}
|
||||
|
||||
func NewFactory() factory.ProviderFactory[cache.Cache, cache.Config] {
|
||||
@@ -22,7 +21,14 @@ func NewFactory() factory.ProviderFactory[cache.Cache, cache.Config] {
|
||||
}
|
||||
|
||||
func New(ctx context.Context, settings factory.ProviderSettings, config cache.Config) (cache.Cache, error) {
|
||||
return &provider{opts: config.Redis}, nil
|
||||
provider := new(provider)
|
||||
redisClient := redis.NewClient(&redis.Options{
|
||||
Addr: fmt.Sprintf("%s:%d", config.Redis.Host, config.Redis.Port),
|
||||
Password: config.Redis.Password,
|
||||
DB: config.Redis.DB,
|
||||
})
|
||||
provider.client = redisClient
|
||||
return provider, nil
|
||||
}
|
||||
|
||||
// WithClient creates a new cache with the given client
|
||||
@@ -30,24 +36,14 @@ func WithClient(client *redis.Client) *provider {
|
||||
return &provider{client: client}
|
||||
}
|
||||
|
||||
// Connect connects to the redis server
|
||||
func (c *provider) Connect(_ context.Context) error {
|
||||
c.client = redis.NewClient(&redis.Options{
|
||||
Addr: fmt.Sprintf("%s:%d", c.opts.Host, c.opts.Port),
|
||||
Password: c.opts.Password,
|
||||
DB: c.opts.DB,
|
||||
})
|
||||
return nil
|
||||
}
|
||||
|
||||
// Store stores the data in the cache
|
||||
func (c *provider) Store(ctx context.Context, cacheKey string, data cache.CacheableEntity, ttl time.Duration) error {
|
||||
return c.client.Set(ctx, cacheKey, data, ttl).Err()
|
||||
func (c *provider) Set(ctx context.Context, orgID string, cacheKey string, data cache.CacheableEntity, ttl time.Duration) error {
|
||||
return c.client.Set(ctx, fmt.Sprintf("%s::%s", orgID, cacheKey), data, ttl).Err()
|
||||
}
|
||||
|
||||
// Retrieve retrieves the data from the cache
|
||||
func (c *provider) Retrieve(ctx context.Context, cacheKey string, dest cache.CacheableEntity, allowExpired bool) (cache.RetrieveStatus, error) {
|
||||
err := c.client.Get(ctx, cacheKey).Scan(dest)
|
||||
func (c *provider) Get(ctx context.Context, orgID string, cacheKey string, dest cache.CacheableEntity, allowExpired bool) (cache.RetrieveStatus, error) {
|
||||
err := c.client.Get(ctx, fmt.Sprintf("%s::%s", orgID, cacheKey)).Scan(dest)
|
||||
if err != nil {
|
||||
if errors.Is(err, redis.Nil) {
|
||||
return cache.RetrieveStatusKeyMiss, nil
|
||||
@@ -57,23 +53,19 @@ func (c *provider) Retrieve(ctx context.Context, cacheKey string, dest cache.Cac
|
||||
return cache.RetrieveStatusHit, nil
|
||||
}
|
||||
|
||||
// SetTTL sets the TTL for the cache entry
|
||||
func (c *provider) SetTTL(ctx context.Context, cacheKey string, ttl time.Duration) {
|
||||
err := c.client.Expire(ctx, cacheKey, ttl).Err()
|
||||
if err != nil {
|
||||
zap.L().Error("error setting TTL for cache key", zap.String("cacheKey", cacheKey), zap.Duration("ttl", ttl), zap.Error(err))
|
||||
}
|
||||
}
|
||||
|
||||
// Remove removes the cache entry
|
||||
func (c *provider) Remove(ctx context.Context, cacheKey string) {
|
||||
c.BulkRemove(ctx, []string{cacheKey})
|
||||
func (c *provider) Delete(ctx context.Context, orgID string, cacheKey string) {
|
||||
c.DeleteMany(ctx, orgID, []string{cacheKey})
|
||||
}
|
||||
|
||||
// BulkRemove removes the cache entries
|
||||
func (c *provider) BulkRemove(ctx context.Context, cacheKeys []string) {
|
||||
func (c *provider) DeleteMany(ctx context.Context, orgID string, cacheKeys []string) {
|
||||
updatedCacheKeys := []string{}
|
||||
for _, cacheKey := range cacheKeys {
|
||||
updatedCacheKeys = append(updatedCacheKeys, fmt.Sprintf("%s::%s", orgID, cacheKey))
|
||||
}
|
||||
if err := c.client.Del(ctx, cacheKeys...).Err(); err != nil {
|
||||
zap.L().Error("error deleting cache keys", zap.Strings("cacheKeys", cacheKeys), zap.Error(err))
|
||||
zap.L().Error("error deleting cache keys", zap.Strings("cacheKeys", updatedCacheKeys), zap.Error(err))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -81,40 +73,3 @@ func (c *provider) BulkRemove(ctx context.Context, cacheKeys []string) {
|
||||
func (c *provider) Close(_ context.Context) error {
|
||||
return c.client.Close()
|
||||
}
|
||||
|
||||
// Ping pings the redis server
|
||||
func (c *provider) Ping(ctx context.Context) error {
|
||||
return c.client.Ping(ctx).Err()
|
||||
}
|
||||
|
||||
// GetClient returns the redis client
|
||||
func (c *provider) GetClient() *redis.Client {
|
||||
return c.client
|
||||
}
|
||||
|
||||
// GetTTL returns the TTL for the cache entry
|
||||
func (c *provider) GetTTL(ctx context.Context, cacheKey string) time.Duration {
|
||||
ttl, err := c.client.TTL(ctx, cacheKey).Result()
|
||||
if err != nil {
|
||||
zap.L().Error("error getting TTL for cache key", zap.String("cacheKey", cacheKey), zap.Error(err))
|
||||
}
|
||||
return ttl
|
||||
}
|
||||
|
||||
// GetKeys returns the keys matching the pattern
|
||||
func (c *provider) GetKeys(ctx context.Context, pattern string) ([]string, error) {
|
||||
return c.client.Keys(ctx, pattern).Result()
|
||||
}
|
||||
|
||||
// GetKeysWithTTL returns the keys matching the pattern with their TTL
|
||||
func (c *provider) GetKeysWithTTL(ctx context.Context, pattern string) (map[string]time.Duration, error) {
|
||||
keys, err := c.GetKeys(ctx, pattern)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result := make(map[string]time.Duration)
|
||||
for _, key := range keys {
|
||||
result[key] = c.GetTTL(ctx, key)
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
@@ -795,9 +795,9 @@ func (r *ClickHouseReader) GetSpansForTrace(ctx context.Context, traceID string,
|
||||
return searchScanResponses, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadataCache(ctx context.Context, traceID string) (*model.GetWaterfallSpansForTraceWithMetadataCache, error) {
|
||||
func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadataCache(ctx context.Context, orgID string, traceID string) (*model.GetWaterfallSpansForTraceWithMetadataCache, error) {
|
||||
cachedTraceData := new(model.GetWaterfallSpansForTraceWithMetadataCache)
|
||||
cacheStatus, err := r.cache.Retrieve(ctx, fmt.Sprintf("getWaterfallSpansForTraceWithMetadata-%v", traceID), cachedTraceData, false)
|
||||
cacheStatus, err := r.cache.Get(ctx, orgID, fmt.Sprintf("getWaterfallSpansForTraceWithMetadata-%v", traceID), cachedTraceData, false)
|
||||
if err != nil {
|
||||
zap.L().Debug("error in retrieving getWaterfallSpansForTraceWithMetadata cache", zap.Error(err), zap.String("traceID", traceID))
|
||||
return nil, err
|
||||
@@ -816,7 +816,7 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadataCache(ctx contex
|
||||
return cachedTraceData, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Context, traceID string, req *model.GetWaterfallSpansForTraceWithMetadataParams) (*model.GetWaterfallSpansForTraceWithMetadataResponse, *model.ApiError) {
|
||||
func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Context, orgID string, traceID string, req *model.GetWaterfallSpansForTraceWithMetadataParams) (*model.GetWaterfallSpansForTraceWithMetadataResponse, *model.ApiError) {
|
||||
response := new(model.GetWaterfallSpansForTraceWithMetadataResponse)
|
||||
var startTime, endTime, durationNano, totalErrorSpans, totalSpans uint64
|
||||
var spanIdToSpanNodeMap = map[string]*model.Span{}
|
||||
@@ -826,7 +826,7 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Con
|
||||
var hasMissingSpans bool
|
||||
|
||||
claims, errv2 := authtypes.ClaimsFromContext(ctx)
|
||||
cachedTraceData, err := r.GetWaterfallSpansForTraceWithMetadataCache(ctx, traceID)
|
||||
cachedTraceData, err := r.GetWaterfallSpansForTraceWithMetadataCache(ctx, orgID, traceID)
|
||||
if err == nil {
|
||||
startTime = cachedTraceData.StartTime
|
||||
endTime = cachedTraceData.EndTime
|
||||
@@ -984,7 +984,7 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Con
|
||||
}
|
||||
|
||||
zap.L().Info("getWaterfallSpansForTraceWithMetadata: processing pre cache", zap.Duration("duration", time.Since(processingBeforeCache)), zap.String("traceID", traceID))
|
||||
cacheErr := r.cache.Store(ctx, fmt.Sprintf("getWaterfallSpansForTraceWithMetadata-%v", traceID), &traceCache, time.Minute*5)
|
||||
cacheErr := r.cache.Set(ctx, orgID, fmt.Sprintf("getWaterfallSpansForTraceWithMetadata-%v", traceID), &traceCache, time.Minute*5)
|
||||
if cacheErr != nil {
|
||||
zap.L().Debug("failed to store cache for getWaterfallSpansForTraceWithMetadata", zap.String("traceID", traceID), zap.Error(err))
|
||||
}
|
||||
@@ -1007,9 +1007,9 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Con
|
||||
return response, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetFlamegraphSpansForTraceCache(ctx context.Context, traceID string) (*model.GetFlamegraphSpansForTraceCache, error) {
|
||||
func (r *ClickHouseReader) GetFlamegraphSpansForTraceCache(ctx context.Context, orgID string, traceID string) (*model.GetFlamegraphSpansForTraceCache, error) {
|
||||
cachedTraceData := new(model.GetFlamegraphSpansForTraceCache)
|
||||
cacheStatus, err := r.cache.Retrieve(ctx, fmt.Sprintf("getFlamegraphSpansForTrace-%v", traceID), cachedTraceData, false)
|
||||
cacheStatus, err := r.cache.Get(ctx, orgID, fmt.Sprintf("getFlamegraphSpansForTrace-%v", traceID), cachedTraceData, false)
|
||||
if err != nil {
|
||||
zap.L().Debug("error in retrieving getFlamegraphSpansForTrace cache", zap.Error(err), zap.String("traceID", traceID))
|
||||
return nil, err
|
||||
@@ -1028,7 +1028,7 @@ func (r *ClickHouseReader) GetFlamegraphSpansForTraceCache(ctx context.Context,
|
||||
return cachedTraceData, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetFlamegraphSpansForTrace(ctx context.Context, traceID string, req *model.GetFlamegraphSpansForTraceParams) (*model.GetFlamegraphSpansForTraceResponse, *model.ApiError) {
|
||||
func (r *ClickHouseReader) GetFlamegraphSpansForTrace(ctx context.Context, orgID string, traceID string, req *model.GetFlamegraphSpansForTraceParams) (*model.GetFlamegraphSpansForTraceResponse, *model.ApiError) {
|
||||
trace := new(model.GetFlamegraphSpansForTraceResponse)
|
||||
var startTime, endTime, durationNano uint64
|
||||
var spanIdToSpanNodeMap = map[string]*model.FlamegraphSpan{}
|
||||
@@ -1037,7 +1037,7 @@ func (r *ClickHouseReader) GetFlamegraphSpansForTrace(ctx context.Context, trace
|
||||
var traceRoots []*model.FlamegraphSpan
|
||||
|
||||
// get the trace tree from cache!
|
||||
cachedTraceData, err := r.GetFlamegraphSpansForTraceCache(ctx, traceID)
|
||||
cachedTraceData, err := r.GetFlamegraphSpansForTraceCache(ctx, orgID, traceID)
|
||||
|
||||
if err == nil {
|
||||
startTime = cachedTraceData.StartTime
|
||||
@@ -1136,7 +1136,7 @@ func (r *ClickHouseReader) GetFlamegraphSpansForTrace(ctx context.Context, trace
|
||||
}
|
||||
|
||||
zap.L().Info("getFlamegraphSpansForTrace: processing pre cache", zap.Duration("duration", time.Since(processingBeforeCache)), zap.String("traceID", traceID))
|
||||
cacheErr := r.cache.Store(ctx, fmt.Sprintf("getFlamegraphSpansForTrace-%v", traceID), &traceCache, time.Minute*5)
|
||||
cacheErr := r.cache.Set(ctx, orgID, fmt.Sprintf("getFlamegraphSpansForTrace-%v", traceID), &traceCache, time.Minute*5)
|
||||
if cacheErr != nil {
|
||||
zap.L().Debug("failed to store cache for getFlamegraphSpansForTrace", zap.String("traceID", traceID), zap.Error(err))
|
||||
}
|
||||
@@ -5187,7 +5187,7 @@ func (r *ClickHouseReader) GetActiveTimeSeriesForMetricName(ctx context.Context,
|
||||
return timeSeries, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_explorer.SummaryListMetricsRequest) (*metrics_explorer.SummaryListMetricsResponse, *model.ApiError) {
|
||||
func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID string, req *metrics_explorer.SummaryListMetricsRequest) (*metrics_explorer.SummaryListMetricsResponse, *model.ApiError) {
|
||||
var args []interface{}
|
||||
|
||||
// Build filter conditions (if any)
|
||||
@@ -5365,7 +5365,7 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
|
||||
}
|
||||
|
||||
//get updated metrics data
|
||||
batch, apiError := r.GetUpdatedMetricsMetadata(ctx, metricNames...)
|
||||
batch, apiError := r.GetUpdatedMetricsMetadata(ctx, orgID, metricNames...)
|
||||
if apiError != nil {
|
||||
zap.L().Error("Error in getting metrics cached metadata", zap.Error(apiError))
|
||||
}
|
||||
@@ -6022,18 +6022,18 @@ LIMIT 40`, // added rand to get diff value every time we run this query
|
||||
return fingerprints, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) DeleteMetricsMetadata(ctx context.Context, metricName string) *model.ApiError {
|
||||
func (r *ClickHouseReader) DeleteMetricsMetadata(ctx context.Context, orgID string, metricName string) *model.ApiError {
|
||||
delQuery := fmt.Sprintf(`ALTER TABLE %s.%s DELETE WHERE metric_name = ?;`, signozMetricDBName, signozUpdatedMetricsMetadataLocalTable)
|
||||
valueCtx := context.WithValue(ctx, "clickhouse_max_threads", constants.MetricsExplorerClickhouseThreads)
|
||||
err := r.db.Exec(valueCtx, delQuery, metricName)
|
||||
if err != nil {
|
||||
return &model.ApiError{Typ: "ClickHouseError", Err: err}
|
||||
}
|
||||
r.cache.Remove(ctx, constants.UpdatedMetricsMetadataCachePrefix+metricName)
|
||||
r.cache.Delete(ctx, orgID, constants.UpdatedMetricsMetadataCachePrefix+metricName)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) UpdateMetricsMetadata(ctx context.Context, req *model.UpdateMetricsMetadata) *model.ApiError {
|
||||
func (r *ClickHouseReader) UpdateMetricsMetadata(ctx context.Context, orgID string, req *model.UpdateMetricsMetadata) *model.ApiError {
|
||||
if req.MetricType == v3.MetricTypeHistogram {
|
||||
labels := []string{"le"}
|
||||
hasLabels, apiError := r.CheckForLabelsInMetric(ctx, req.MetricName, labels)
|
||||
@@ -6062,7 +6062,7 @@ func (r *ClickHouseReader) UpdateMetricsMetadata(ctx context.Context, req *model
|
||||
}
|
||||
}
|
||||
|
||||
apiErr := r.DeleteMetricsMetadata(ctx, req.MetricName)
|
||||
apiErr := r.DeleteMetricsMetadata(ctx, orgID, req.MetricName)
|
||||
if apiErr != nil {
|
||||
return apiErr
|
||||
}
|
||||
@@ -6073,7 +6073,7 @@ VALUES ( ?, ?, ?, ?, ?, ?, ?);`, signozMetricDBName, signozUpdatedMetricsMetadat
|
||||
if err != nil {
|
||||
return &model.ApiError{Typ: "ClickHouseError", Err: err}
|
||||
}
|
||||
err = r.cache.Store(ctx, constants.UpdatedMetricsMetadataCachePrefix+req.MetricName, req, -1)
|
||||
err = r.cache.Set(ctx, orgID, constants.UpdatedMetricsMetadataCachePrefix+req.MetricName, req, -1)
|
||||
if err != nil {
|
||||
return &model.ApiError{Typ: "CachingErr", Err: err}
|
||||
}
|
||||
@@ -6114,7 +6114,7 @@ func (r *ClickHouseReader) CheckForLabelsInMetric(ctx context.Context, metricNam
|
||||
return hasLE, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) PreloadMetricsMetadata(ctx context.Context) []error {
|
||||
func (r *ClickHouseReader) PreloadMetricsMetadata(ctx context.Context, orgID string) []error {
|
||||
var allMetricsMetadata []model.UpdateMetricsMetadata
|
||||
var errorList []error
|
||||
// Fetch all rows from ClickHouse
|
||||
@@ -6127,7 +6127,7 @@ func (r *ClickHouseReader) PreloadMetricsMetadata(ctx context.Context) []error {
|
||||
return errorList
|
||||
}
|
||||
for _, m := range allMetricsMetadata {
|
||||
err := r.cache.Store(ctx, constants.UpdatedMetricsMetadataCachePrefix+m.MetricName, &m, -1)
|
||||
err := r.cache.Set(ctx, orgID, constants.UpdatedMetricsMetadataCachePrefix+m.MetricName, &m, -1)
|
||||
if err != nil {
|
||||
errorList = append(errorList, err)
|
||||
}
|
||||
@@ -6136,7 +6136,7 @@ func (r *ClickHouseReader) PreloadMetricsMetadata(ctx context.Context) []error {
|
||||
return errorList
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetUpdatedMetricsMetadata(ctx context.Context, metricNames ...string) (map[string]*model.UpdateMetricsMetadata, *model.ApiError) {
|
||||
func (r *ClickHouseReader) GetUpdatedMetricsMetadata(ctx context.Context, orgID string, metricNames ...string) (map[string]*model.UpdateMetricsMetadata, *model.ApiError) {
|
||||
cachedMetadata := make(map[string]*model.UpdateMetricsMetadata)
|
||||
var missingMetrics []string
|
||||
|
||||
@@ -6144,7 +6144,7 @@ func (r *ClickHouseReader) GetUpdatedMetricsMetadata(ctx context.Context, metric
|
||||
for _, metricName := range metricNames {
|
||||
metadata := new(model.UpdateMetricsMetadata)
|
||||
cacheKey := constants.UpdatedMetricsMetadataCachePrefix + metricName
|
||||
retrieveStatus, err := r.cache.Retrieve(ctx, cacheKey, metadata, true)
|
||||
retrieveStatus, err := r.cache.Get(ctx, orgID, cacheKey, metadata, true)
|
||||
if err == nil && retrieveStatus == cache.RetrieveStatusHit {
|
||||
cachedMetadata[metricName] = metadata
|
||||
} else {
|
||||
@@ -6185,7 +6185,7 @@ func (r *ClickHouseReader) GetUpdatedMetricsMetadata(ctx context.Context, metric
|
||||
|
||||
// Cache the result for future requests.
|
||||
cacheKey := constants.UpdatedMetricsMetadataCachePrefix + metadata.MetricName
|
||||
if cacheErr := r.cache.Store(ctx, cacheKey, metadata, -1); cacheErr != nil {
|
||||
if cacheErr := r.cache.Set(ctx, orgID, cacheKey, metadata, -1); cacheErr != nil {
|
||||
zap.L().Error("Failed to store metrics metadata in cache", zap.String("metric_name", metadata.MetricName), zap.Error(cacheErr))
|
||||
}
|
||||
cachedMetadata[metadata.MetricName] = metadata
|
||||
|
||||
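
Note: the metrics metadata helpers above now carry an orgID on every cache call and use the Set/Get/Delete API instead of Store/Retrieve/Remove, so cached metadata from different organizations can no longer collide. A minimal sketch of that org-scoped keying follows; it uses a simplified in-memory stand-in, not the real pkg/cache implementation, and the names below are illustrative assumptions rather than SigNoz APIs.

package main

import (
	"context"
	"fmt"
	"sync"
)

// orgCache is a simplified stand-in for the org-scoped cache seen in the diff:
// every call takes ctx and orgID so entries are namespaced per organization.
type orgCache struct {
	mu   sync.RWMutex
	data map[string][]byte
}

func newOrgCache() *orgCache { return &orgCache{data: map[string][]byte{}} }

// key namespaces the cache key by org, mirroring Set(ctx, orgID, key, ...).
func (c *orgCache) key(orgID, cacheKey string) string { return orgID + "::" + cacheKey }

func (c *orgCache) Set(_ context.Context, orgID, cacheKey string, value []byte) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.data[c.key(orgID, cacheKey)] = value
}

func (c *orgCache) Get(_ context.Context, orgID, cacheKey string) ([]byte, bool) {
	c.mu.RLock()
	defer c.mu.RUnlock()
	v, ok := c.data[c.key(orgID, cacheKey)]
	return v, ok
}

func (c *orgCache) Delete(_ context.Context, orgID, cacheKey string) {
	c.mu.Lock()
	defer c.mu.Unlock()
	delete(c.data, c.key(orgID, cacheKey))
}

func main() {
	c := newOrgCache()
	ctx := context.Background()
	c.Set(ctx, "org-a", "metrics-metadata::http_requests_total", []byte(`{"type":"sum"}`))
	if _, ok := c.Get(ctx, "org-b", "metrics-metadata::http_requests_total"); !ok {
		fmt.Println("org-b does not see org-a's metadata")
	}
}
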
@@ -52,7 +52,6 @@ import (
|
||||
tracesV3 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v3"
|
||||
tracesV4 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v4"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/auth"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/contextlinks"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/postprocess"
|
||||
@@ -168,9 +167,6 @@ type APIHandlerOpts struct {
|
||||
// Log parsing pipelines
|
||||
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
|
||||
|
||||
// cache
|
||||
Cache cache.Cache
|
||||
|
||||
// Querier Influx Interval
|
||||
FluxInterval time.Duration
|
||||
|
||||
@@ -187,14 +183,14 @@ type APIHandlerOpts struct {
|
||||
func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
|
||||
querierOpts := querier.QuerierOptions{
|
||||
Reader: opts.Reader,
|
||||
Cache: opts.Cache,
|
||||
Cache: opts.Signoz.Cache,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
FluxInterval: opts.FluxInterval,
|
||||
}
|
||||
|
||||
querierOptsV2 := querierV2.QuerierOptions{
|
||||
Reader: opts.Reader,
|
||||
Cache: opts.Cache,
|
||||
Cache: opts.Signoz.Cache,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
FluxInterval: opts.FluxInterval,
|
||||
}
|
||||
|
||||
@@ -1,8 +1,14 @@
 package thirdPartyApi

 import (
-	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
 	"net"
+
+	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
 )

+const (
+	urlPathKey = "http.url"
+	serverNameKey = "net.peer.name"
+)
+
 var defaultStepInterval int64 = 60
@@ -17,7 +23,7 @@ func FilterResponse(results []*v3.Result) []*v3.Result {
|
||||
filteredRows := make([]*v3.TableRow, 0, len(res.Table.Rows))
|
||||
for _, row := range res.Table.Rows {
|
||||
if row.Data != nil {
|
||||
if domainVal, ok := row.Data["net.peer.name"]; ok {
|
||||
if domainVal, ok := row.Data[serverNameKey]; ok {
|
||||
if domainStr, ok := domainVal.(string); ok {
|
||||
if net.ParseIP(domainStr) != nil {
|
||||
continue
|
||||
@@ -63,7 +69,7 @@ func BuildDomainList(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, er
|
||||
StepInterval: defaultStepInterval,
|
||||
AggregateOperator: v3.AggregateOperatorCountDistinct,
|
||||
AggregateAttribute: v3.AttributeKey{
|
||||
Key: "http.url",
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
@@ -74,7 +80,7 @@ func BuildDomainList(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, er
|
||||
Items: getFilterSet([]v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "http.url",
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
@@ -96,7 +102,7 @@ func BuildDomainList(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, er
|
||||
Expression: "endpoints",
|
||||
GroupBy: getGroupBy([]v3.AttributeKey{
|
||||
{
|
||||
Key: "net.peer.name",
|
||||
Key: serverNameKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
@@ -122,7 +128,7 @@ func BuildDomainList(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, er
|
||||
Items: getFilterSet([]v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "http.url",
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
@@ -144,7 +150,7 @@ func BuildDomainList(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, er
|
||||
Expression: "lastseen",
|
||||
GroupBy: getGroupBy([]v3.AttributeKey{
|
||||
{
|
||||
Key: "net.peer.name",
|
||||
Key: serverNameKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
@@ -170,7 +176,7 @@ func BuildDomainList(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, er
|
||||
Items: getFilterSet([]v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "http.url",
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
@@ -192,7 +198,7 @@ func BuildDomainList(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, er
|
||||
Expression: "rps",
|
||||
GroupBy: getGroupBy([]v3.AttributeKey{
|
||||
{
|
||||
Key: "net.peer.name",
|
||||
Key: serverNameKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
@@ -228,7 +234,7 @@ func BuildDomainList(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, er
|
||||
},
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "http.url",
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
@@ -250,7 +256,7 @@ func BuildDomainList(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, er
|
||||
Expression: "error",
|
||||
GroupBy: getGroupBy([]v3.AttributeKey{
|
||||
{
|
||||
Key: "net.peer.name",
|
||||
Key: serverNameKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
@@ -329,7 +335,7 @@ func BuildDomainList(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, er
|
||||
Items: getFilterSet([]v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "http.url",
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
@@ -351,7 +357,7 @@ func BuildDomainList(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, er
|
||||
Expression: "p99",
|
||||
GroupBy: getGroupBy([]v3.AttributeKey{
|
||||
{
|
||||
Key: "net.peer.name",
|
||||
Key: serverNameKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
@@ -401,7 +407,7 @@ func BuildDomainInfo(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, er
|
||||
StepInterval: defaultStepInterval,
|
||||
AggregateOperator: v3.AggregateOperatorCount,
|
||||
AggregateAttribute: v3.AttributeKey{
|
||||
Key: "http.url",
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
@@ -412,7 +418,7 @@ func BuildDomainInfo(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, er
|
||||
Items: getFilterSet([]v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "http.url",
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
@@ -435,7 +441,7 @@ func BuildDomainInfo(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, er
|
||||
Disabled: false,
|
||||
GroupBy: getGroupBy([]v3.AttributeKey{
|
||||
{
|
||||
Key: "http.url",
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
@@ -461,7 +467,7 @@ func BuildDomainInfo(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, er
|
||||
Items: getFilterSet([]v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "http.url",
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
@@ -503,7 +509,7 @@ func BuildDomainInfo(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, er
|
||||
Items: getFilterSet([]v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "http.url",
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
@@ -544,7 +550,7 @@ func BuildDomainInfo(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, er
|
||||
Items: getFilterSet([]v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "http.url",
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
|
||||
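
Note: the thirdPartyApi changes replace the repeated "http.url" and "net.peer.name" string literals with the urlPathKey and serverNameKey constants, and FilterResponse drops rows whose server name parses as a bare IP. A small hedged sketch of that filtering idea is below, using plain maps instead of the v3 result types (row shape and function name are illustrative assumptions).

package main

import (
	"fmt"
	"net"
)

const serverNameKey = "net.peer.name" // same attribute key the diff lifts into a constant

// filterIPDomains keeps only rows whose server name is a hostname rather than
// a raw IP literal, mirroring the net.ParseIP check in FilterResponse.
func filterIPDomains(rows []map[string]any) []map[string]any {
	filtered := make([]map[string]any, 0, len(rows))
	for _, row := range rows {
		if v, ok := row[serverNameKey]; ok {
			if s, ok := v.(string); ok && net.ParseIP(s) != nil {
				continue // skip bare IPs such as 10.0.0.12
			}
		}
		filtered = append(filtered, row)
	}
	return filtered
}

func main() {
	rows := []map[string]any{
		{serverNameKey: "api.stripe.com"},
		{serverNameKey: "10.0.0.12"},
	}
	fmt.Println(len(filterIPDomains(rows))) // 1: only the hostname row survives
}
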
@@ -13,7 +13,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/postprocess"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/querycache"
|
||||
"github.com/SigNoz/signoz/pkg/types/querybuildertypes"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
@@ -106,10 +106,10 @@ func (q *querier) runBuilderQuery(
|
||||
return
|
||||
}
|
||||
|
||||
misses := q.queryCache.FindMissingTimeRanges(start, end, builderQuery.StepInterval, cacheKeys[queryName])
|
||||
misses := q.queryCache.FindMissingTimeRanges(ctx, start, end, builderQuery.StepInterval, cacheKeys[queryName])
|
||||
zap.L().Info("cache misses for logs query", zap.Any("misses", misses))
|
||||
missedSeries := make([]querycache.CachedSeriesData, 0)
|
||||
filteredMissedSeries := make([]querycache.CachedSeriesData, 0)
|
||||
missedSeries := make([]*querybuildertypes.SeriesData, 0)
|
||||
filteredMissedSeries := make([]*querybuildertypes.SeriesData, 0)
|
||||
for _, miss := range misses {
|
||||
query, err = prepareLogsQuery(ctx, miss.Start, miss.End, builderQuery, params)
|
||||
if err != nil {
|
||||
@@ -131,7 +131,7 @@ func (q *querier) runBuilderQuery(
|
||||
// making sure that empty range doesn't doesn't enter the cache
|
||||
// empty results from filteredSeries means data was filtered out, but empty series means actual empty data
|
||||
if len(filteredSeries) > 0 || len(series) == 0 {
|
||||
filteredMissedSeries = append(filteredMissedSeries, querycache.CachedSeriesData{
|
||||
filteredMissedSeries = append(filteredMissedSeries, &querybuildertypes.SeriesData{
|
||||
Data: filteredSeries,
|
||||
Start: startTime,
|
||||
End: endTime,
|
||||
@@ -139,17 +139,17 @@ func (q *querier) runBuilderQuery(
|
||||
}
|
||||
|
||||
// for the actual response
|
||||
missedSeries = append(missedSeries, querycache.CachedSeriesData{
|
||||
missedSeries = append(missedSeries, &querybuildertypes.SeriesData{
|
||||
Data: series,
|
||||
Start: miss.Start,
|
||||
End: miss.End,
|
||||
})
|
||||
}
|
||||
|
||||
filteredMergedSeries := q.queryCache.MergeWithCachedSeriesDataV2(cacheKeys[queryName], filteredMissedSeries)
|
||||
q.queryCache.StoreSeriesInCache(cacheKeys[queryName], filteredMergedSeries)
|
||||
filteredMergedSeries := q.queryCache.MergeWithCachedSeriesDataV2(ctx, cacheKeys[queryName], filteredMissedSeries)
|
||||
q.queryCache.StoreSeriesInCache(ctx, cacheKeys[queryName], filteredMergedSeries)
|
||||
|
||||
mergedSeries := q.queryCache.MergeWithCachedSeriesDataV2(cacheKeys[queryName], missedSeries)
|
||||
mergedSeries := q.queryCache.MergeWithCachedSeriesDataV2(ctx, cacheKeys[queryName], missedSeries)
|
||||
|
||||
resultSeries := common.GetSeriesFromCachedDataV2(mergedSeries, start, end, builderQuery.StepInterval)
|
||||
|
||||
@@ -238,9 +238,9 @@ func (q *querier) runBuilderQuery(
|
||||
}
|
||||
|
||||
cacheKey := cacheKeys[queryName]
|
||||
misses := q.queryCache.FindMissingTimeRanges(start, end, builderQuery.StepInterval, cacheKey)
|
||||
misses := q.queryCache.FindMissingTimeRanges(ctx, start, end, builderQuery.StepInterval, cacheKey)
|
||||
zap.L().Info("cache misses for metrics query", zap.Any("misses", misses))
|
||||
missedSeries := make([]querycache.CachedSeriesData, 0)
|
||||
missedSeries := make([]*querybuildertypes.SeriesData, 0)
|
||||
for _, miss := range misses {
|
||||
query, err := metricsV3.PrepareMetricQuery(
|
||||
miss.Start,
|
||||
@@ -269,13 +269,13 @@ func (q *querier) runBuilderQuery(
|
||||
}
|
||||
return
|
||||
}
|
||||
missedSeries = append(missedSeries, querycache.CachedSeriesData{
|
||||
missedSeries = append(missedSeries, &querybuildertypes.SeriesData{
|
||||
Start: miss.Start,
|
||||
End: miss.End,
|
||||
Data: series,
|
||||
})
|
||||
}
|
||||
mergedSeries := q.queryCache.MergeWithCachedSeriesData(cacheKey, missedSeries)
|
||||
mergedSeries := q.queryCache.MergeWithCachedSeriesData(ctx, cacheKey, missedSeries)
|
||||
|
||||
resultSeries := common.GetSeriesFromCachedData(mergedSeries, start, end)
|
||||
|
||||
@@ -314,9 +314,9 @@ func (q *querier) runBuilderExpression(
|
||||
|
||||
cacheKey := cacheKeys[queryName]
|
||||
step := postprocess.StepIntervalForFunction(params, queryName)
|
||||
misses := q.queryCache.FindMissingTimeRanges(params.Start, params.End, step, cacheKey)
|
||||
misses := q.queryCache.FindMissingTimeRanges(ctx, params.Start, params.End, step, cacheKey)
|
||||
zap.L().Info("cache misses for expression query", zap.Any("misses", misses))
|
||||
missedSeries := make([]querycache.CachedSeriesData, 0)
|
||||
missedSeries := make([]*querybuildertypes.SeriesData, 0)
|
||||
for _, miss := range misses {
|
||||
missQueries, _ := q.builder.PrepareQueries(&v3.QueryRangeParamsV3{
|
||||
Start: miss.Start,
|
||||
@@ -332,13 +332,13 @@ func (q *querier) runBuilderExpression(
|
||||
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
|
||||
return
|
||||
}
|
||||
missedSeries = append(missedSeries, querycache.CachedSeriesData{
|
||||
missedSeries = append(missedSeries, &querybuildertypes.SeriesData{
|
||||
Start: miss.Start,
|
||||
End: miss.End,
|
||||
Data: series,
|
||||
})
|
||||
}
|
||||
mergedSeries := q.queryCache.MergeWithCachedSeriesData(cacheKey, missedSeries)
|
||||
mergedSeries := q.queryCache.MergeWithCachedSeriesData(ctx, cacheKey, missedSeries)
|
||||
|
||||
resultSeries := common.GetSeriesFromCachedData(mergedSeries, params.Start, params.End)
|
||||
|
||||
|
||||
@@ -6,6 +6,7 @@ import (
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
logsV4 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v4"
|
||||
metricsV3 "github.com/SigNoz/signoz/pkg/query-service/app/metrics/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
@@ -13,10 +14,9 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/common"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
chErrors "github.com/SigNoz/signoz/pkg/query-service/errors"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/querycache"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||
"github.com/SigNoz/signoz/pkg/types/querybuildertypes"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
@@ -68,7 +68,7 @@ func NewQuerier(opts QuerierOptions) interfaces.Querier {
|
||||
logsQueryBuilder := logsV4.PrepareLogsQuery
|
||||
tracesQueryBuilder := tracesV4.PrepareTracesQuery
|
||||
|
||||
qc := querycache.NewQueryCache(querycache.WithCache(opts.Cache), querycache.WithFluxInterval(opts.FluxInterval))
|
||||
qc := querybuildertypes.NewQueryCache(querybuildertypes.WithCache(opts.Cache), querybuildertypes.WithFluxInterval(opts.FluxInterval))
|
||||
|
||||
return &querier{
|
||||
cache: opts.Cache,
|
||||
@@ -210,9 +210,9 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam
|
||||
channelResults <- channelResult{Err: err, Name: queryName, Query: query.Query, Series: series}
|
||||
return
|
||||
}
|
||||
misses := q.queryCache.FindMissingTimeRanges(params.Start, params.End, params.Step, cacheKey)
|
||||
misses := q.queryCache.FindMissingTimeRanges(ctx, params.Start, params.End, params.Step, cacheKey)
|
||||
zap.L().Info("cache misses for metrics prom query", zap.Any("misses", misses))
|
||||
missedSeries := make([]querycache.CachedSeriesData, 0)
|
||||
missedSeries := make([]*querybuildertypes.SeriesData, 0)
|
||||
for _, miss := range misses {
|
||||
query := metricsV3.BuildPromQuery(promQuery, params.Step, miss.Start, miss.End)
|
||||
series, err := q.execPromQuery(ctx, query)
|
||||
@@ -220,13 +220,13 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam
|
||||
channelResults <- channelResult{Err: err, Name: queryName, Query: query.Query, Series: nil}
|
||||
return
|
||||
}
|
||||
missedSeries = append(missedSeries, querycache.CachedSeriesData{
|
||||
missedSeries = append(missedSeries, &querybuildertypes.SeriesData{
|
||||
Data: series,
|
||||
Start: miss.Start,
|
||||
End: miss.End,
|
||||
})
|
||||
}
|
||||
mergedSeries := q.queryCache.MergeWithCachedSeriesData(cacheKey, missedSeries)
|
||||
mergedSeries := q.queryCache.MergeWithCachedSeriesData(ctx, cacheKey, missedSeries)
|
||||
resultSeries := common.GetSeriesFromCachedData(mergedSeries, params.Start, params.End)
|
||||
channelResults <- channelResult{Err: nil, Name: queryName, Query: promQuery.Query, Series: resultSeries}
|
||||
|
||||
|
||||
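
Note: across the querier, FindMissingTimeRanges, MergeWithCachedSeriesData and StoreSeriesInCache now take a context as their first argument and work with querybuildertypes values instead of the old querycache types. A rough sketch of the miss-then-merge flow follows, with simplified local types; the real methods live on the query cache in pkg/types/querybuildertypes, and the signatures here are assumptions for illustration only.

package main

import (
	"context"
	"fmt"
)

// missInterval and seriesData are simplified stand-ins for the
// querybuildertypes MissInterval and SeriesData used in the diff.
type missInterval struct{ Start, End int64 }
type seriesData struct {
	Start, End int64
	Data       []float64
}

// findMissingTimeRanges returns the requested range minus whatever the cache
// already covers; here the "cache" is a single contiguous cached window.
func findMissingTimeRanges(_ context.Context, start, end int64, cached *seriesData) []missInterval {
	if cached == nil || cached.End <= start || cached.Start >= end {
		return []missInterval{{Start: start, End: end}}
	}
	var misses []missInterval
	if cached.Start > start {
		misses = append(misses, missInterval{Start: start, End: cached.Start})
	}
	if cached.End < end {
		misses = append(misses, missInterval{Start: cached.End, End: end})
	}
	return misses
}

func main() {
	ctx := context.Background()
	cached := &seriesData{Start: 100, End: 200}
	// Request 0..300: only 0..100 and 200..300 need to be fetched, then merged
	// back into the cached window before building the final result series.
	for _, m := range findMissingTimeRanges(ctx, 0, 300, cached) {
		fmt.Printf("fetch %d-%d, then merge into the cached window\n", m.Start, m.End)
	}
}
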
@@ -2,7 +2,6 @@ package querier
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math"
|
||||
"strings"
|
||||
@@ -10,18 +9,20 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/DATA-DOG/go-sqlmock"
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/cache/memorycache"
|
||||
"github.com/SigNoz/signoz/pkg/factory/factorytest"
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||
"github.com/SigNoz/signoz/pkg/prometheus/prometheustest"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
tracesV3 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache/inmemory"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/querycache"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
|
||||
"github.com/SigNoz/signoz/pkg/types/querybuildertypes"
|
||||
cmock "github.com/srikanthccv/ClickHouse-go-mock"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
@@ -63,7 +64,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
|
||||
requestedEnd int64 // in milliseconds
|
||||
requestedStep int64 // in seconds
|
||||
cachedSeries []*v3.Series
|
||||
expectedMiss []querycache.MissInterval
|
||||
expectedMiss []*querybuildertypes.MissInterval
|
||||
replaceCachedData bool
|
||||
}{
|
||||
{
|
||||
@@ -88,7 +89,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*querybuildertypes.MissInterval{
|
||||
{
|
||||
Start: 1675115596722,
|
||||
End: 1675115596722 + 60*60*1000,
|
||||
@@ -129,7 +130,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{},
|
||||
expectedMiss: []*querybuildertypes.MissInterval{},
|
||||
},
|
||||
{
|
||||
name: "cached time range is a left overlap of the requested time range",
|
||||
@@ -157,7 +158,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*querybuildertypes.MissInterval{
|
||||
{
|
||||
Start: 1675115596722 + 120*60*1000,
|
||||
End: 1675115596722 + 180*60*1000,
|
||||
@@ -190,7 +191,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*querybuildertypes.MissInterval{
|
||||
{
|
||||
Start: 1675115596722,
|
||||
End: 1675115596722 + 60*60*1000,
|
||||
@@ -223,7 +224,7 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*querybuildertypes.MissInterval{
|
||||
{
|
||||
Start: 1675115596722,
|
||||
End: 1675115596722 + 180*60*1000,
|
||||
@@ -233,28 +234,34 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
c := inmemory.New(&inmemory.Options{TTL: 5 * time.Minute, CleanupInterval: 10 * time.Minute})
|
||||
|
||||
qc := querycache.NewQueryCache(querycache.WithCache(c))
|
||||
opts := cache.Memory{
|
||||
TTL: 5 * time.Minute,
|
||||
CleanupInterval: 10 * time.Minute,
|
||||
}
|
||||
c, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
|
||||
if err != nil {
|
||||
t.Errorf("error initialising cache: %v", err)
|
||||
}
|
||||
qc := querybuildertypes.NewQueryCache(querybuildertypes.WithCache(c))
|
||||
|
||||
for idx, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
cacheKey := fmt.Sprintf("test-cache-key-%d", idx)
|
||||
cachedData := &querycache.CachedSeriesData{
|
||||
cachedData := &querybuildertypes.SeriesData{
|
||||
Start: minTimestamp(tc.cachedSeries),
|
||||
End: maxTimestamp(tc.cachedSeries),
|
||||
Data: tc.cachedSeries,
|
||||
}
|
||||
jsonData, err := json.Marshal([]*querycache.CachedSeriesData{cachedData})
|
||||
data := querybuildertypes.CachedSeriesData{Series: []*querybuildertypes.SeriesData{cachedData}}
|
||||
if err != nil {
|
||||
t.Errorf("error marshalling cached data: %v", err)
|
||||
}
|
||||
err = c.Store(cacheKey, jsonData, 5*time.Minute)
|
||||
err = c.Store(context.Background(), cacheKey, &data, 5*time.Minute)
|
||||
if err != nil {
|
||||
t.Errorf("error storing cached data: %v", err)
|
||||
}
|
||||
|
||||
misses := qc.FindMissingTimeRanges(tc.requestedStart, tc.requestedEnd, tc.requestedStep, cacheKey)
|
||||
misses := qc.FindMissingTimeRanges(context.Background(), tc.requestedStart, tc.requestedEnd, tc.requestedStep, cacheKey)
|
||||
if len(misses) != len(tc.expectedMiss) {
|
||||
t.Errorf("expected %d misses, got %d", len(tc.expectedMiss), len(misses))
|
||||
}
|
||||
@@ -280,7 +287,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
|
||||
requestedStep int64
|
||||
cachedSeries []*v3.Series
|
||||
fluxInterval time.Duration
|
||||
expectedMiss []querycache.MissInterval
|
||||
expectedMiss []*querybuildertypes.MissInterval
|
||||
}{
|
||||
{
|
||||
name: "cached time range is a subset of the requested time range",
|
||||
@@ -305,7 +312,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
|
||||
},
|
||||
},
|
||||
fluxInterval: 5 * time.Minute,
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*querybuildertypes.MissInterval{
|
||||
{
|
||||
Start: 1675115596722,
|
||||
End: 1675115596722 + 60*60*1000,
|
||||
@@ -347,7 +354,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
|
||||
},
|
||||
},
|
||||
fluxInterval: 5 * time.Minute,
|
||||
expectedMiss: []querycache.MissInterval{},
|
||||
expectedMiss: []*querybuildertypes.MissInterval{},
|
||||
},
|
||||
{
|
||||
name: "cache time range is a left overlap of the requested time range",
|
||||
@@ -376,7 +383,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
|
||||
},
|
||||
},
|
||||
fluxInterval: 5 * time.Minute,
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*querybuildertypes.MissInterval{
|
||||
{
|
||||
Start: 1675115596722 + 120*60*1000,
|
||||
End: 1675115596722 + 180*60*1000,
|
||||
@@ -410,7 +417,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
|
||||
},
|
||||
},
|
||||
fluxInterval: 5 * time.Minute,
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*querybuildertypes.MissInterval{
|
||||
{
|
||||
Start: 1675115596722,
|
||||
End: 1675115596722 + 60*60*1000,
|
||||
@@ -444,7 +451,7 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
|
||||
},
|
||||
},
|
||||
fluxInterval: 5 * time.Minute,
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*querybuildertypes.MissInterval{
|
||||
{
|
||||
Start: 1675115596722,
|
||||
End: 1675115596722 + 180*60*1000,
|
||||
@@ -453,27 +460,30 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
c := inmemory.New(&inmemory.Options{TTL: 5 * time.Minute, CleanupInterval: 10 * time.Minute})
|
||||
|
||||
qc := querycache.NewQueryCache(querycache.WithCache(c))
|
||||
opts := cache.Memory{
|
||||
TTL: 5 * time.Minute,
|
||||
CleanupInterval: 10 * time.Minute,
|
||||
}
|
||||
c, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
|
||||
if err != nil {
|
||||
t.Errorf("error initialising cache: %v", err)
|
||||
}
|
||||
qc := querybuildertypes.NewQueryCache(querybuildertypes.WithCache(c))
|
||||
|
||||
for idx, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
cacheKey := fmt.Sprintf("test-cache-key-%d", idx)
|
||||
cachedData := &querycache.CachedSeriesData{
|
||||
cachedData := &querybuildertypes.SeriesData{
|
||||
Start: minTimestamp(tc.cachedSeries),
|
||||
End: maxTimestamp(tc.cachedSeries),
|
||||
Data: tc.cachedSeries,
|
||||
}
|
||||
jsonData, err := json.Marshal([]*querycache.CachedSeriesData{cachedData})
|
||||
if err != nil {
|
||||
t.Errorf("error marshalling cached data: %v", err)
|
||||
}
|
||||
err = c.Store(cacheKey, jsonData, 5*time.Minute)
|
||||
data := querybuildertypes.CachedSeriesData{Series: []*querybuildertypes.SeriesData{cachedData}}
|
||||
err = c.Store(context.Background(), cacheKey, &data, 5*time.Minute)
|
||||
if err != nil {
|
||||
t.Errorf("error storing cached data: %v", err)
|
||||
}
|
||||
misses := qc.FindMissingTimeRanges(tc.requestedStart, tc.requestedEnd, tc.requestedStep, cacheKey)
|
||||
misses := qc.FindMissingTimeRanges(context.Background(), tc.requestedStart, tc.requestedEnd, tc.requestedStep, cacheKey)
|
||||
if len(misses) != len(tc.expectedMiss) {
|
||||
t.Errorf("expected %d misses, got %d", len(tc.expectedMiss), len(misses))
|
||||
}
|
||||
@@ -625,9 +635,16 @@ func TestQueryRange(t *testing.T) {
|
||||
},
|
||||
},
|
||||
}
|
||||
cache := inmemory.New(&inmemory.Options{TTL: 5 * time.Minute, CleanupInterval: 10 * time.Minute})
|
||||
cacheOpts := cache.Memory{
|
||||
TTL: 5 * time.Minute,
|
||||
CleanupInterval: 10 * time.Minute,
|
||||
}
|
||||
c, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cacheOpts})
|
||||
if err != nil {
|
||||
t.Errorf("error initialising cache: %v", err)
|
||||
}
|
||||
opts := QuerierOptions{
|
||||
Cache: cache,
|
||||
Cache: c,
|
||||
Reader: nil,
|
||||
FluxInterval: 5 * time.Minute,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
@@ -736,9 +753,16 @@ func TestQueryRangeValueType(t *testing.T) {
|
||||
},
|
||||
},
|
||||
}
|
||||
cache := inmemory.New(&inmemory.Options{TTL: 60 * time.Minute, CleanupInterval: 10 * time.Minute})
|
||||
cacheOpts := cache.Memory{
|
||||
TTL: 5 * time.Minute,
|
||||
CleanupInterval: 10 * time.Minute,
|
||||
}
|
||||
c, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cacheOpts})
|
||||
if err != nil {
|
||||
t.Errorf("error initialising cache: %v", err)
|
||||
}
|
||||
opts := QuerierOptions{
|
||||
Cache: cache,
|
||||
Cache: c,
|
||||
Reader: nil,
|
||||
FluxInterval: 5 * time.Minute,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
@@ -894,9 +918,16 @@ func TestQueryRangeTimeShiftWithCache(t *testing.T) {
|
||||
},
|
||||
},
|
||||
}
|
||||
cache := inmemory.New(&inmemory.Options{TTL: 60 * time.Minute, CleanupInterval: 10 * time.Minute})
|
||||
cacheOpts := cache.Memory{
|
||||
TTL: 5 * time.Minute,
|
||||
CleanupInterval: 10 * time.Minute,
|
||||
}
|
||||
c, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cacheOpts})
|
||||
if err != nil {
|
||||
t.Errorf("error initialising cache: %v", err)
|
||||
}
|
||||
opts := QuerierOptions{
|
||||
Cache: cache,
|
||||
Cache: c,
|
||||
Reader: nil,
|
||||
FluxInterval: 5 * time.Minute,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
@@ -995,9 +1026,16 @@ func TestQueryRangeTimeShiftWithLimitAndCache(t *testing.T) {
|
||||
},
|
||||
},
|
||||
}
|
||||
cache := inmemory.New(&inmemory.Options{TTL: 60 * time.Minute, CleanupInterval: 10 * time.Minute})
|
||||
cacheOpts := cache.Memory{
|
||||
TTL: 5 * time.Minute,
|
||||
CleanupInterval: 10 * time.Minute,
|
||||
}
|
||||
c, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cacheOpts})
|
||||
if err != nil {
|
||||
t.Errorf("error initialising cache: %v", err)
|
||||
}
|
||||
opts := QuerierOptions{
|
||||
Cache: cache,
|
||||
Cache: c,
|
||||
Reader: nil,
|
||||
FluxInterval: 5 * time.Minute,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
@@ -1067,9 +1105,16 @@ func TestQueryRangeValueTypePromQL(t *testing.T) {
|
||||
},
|
||||
},
|
||||
}
|
||||
cache := inmemory.New(&inmemory.Options{TTL: 60 * time.Minute, CleanupInterval: 10 * time.Minute})
|
||||
cacheOpts := cache.Memory{
|
||||
TTL: 5 * time.Minute,
|
||||
CleanupInterval: 10 * time.Minute,
|
||||
}
|
||||
c, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cacheOpts})
|
||||
if err != nil {
|
||||
t.Errorf("error initialising cache: %v", err)
|
||||
}
|
||||
opts := QuerierOptions{
|
||||
Cache: cache,
|
||||
Cache: c,
|
||||
Reader: nil,
|
||||
FluxInterval: 5 * time.Minute,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
@@ -1094,17 +1139,17 @@ func TestQueryRangeValueTypePromQL(t *testing.T) {
|
||||
|
||||
expectedQueryAndTimeRanges := []struct {
|
||||
query string
|
||||
ranges []querycache.MissInterval
|
||||
ranges []*querybuildertypes.MissInterval
|
||||
}{
|
||||
{
|
||||
query: "signoz_calls_total",
|
||||
ranges: []querycache.MissInterval{
|
||||
ranges: []*querybuildertypes.MissInterval{
|
||||
{Start: 1675115596722, End: 1675115596722 + 120*60*1000},
|
||||
},
|
||||
},
|
||||
{
|
||||
query: "signoz_latency_bucket",
|
||||
ranges: []querycache.MissInterval{
|
||||
ranges: []*querybuildertypes.MissInterval{
|
||||
{Start: 1675115596722 + 60*60*1000, End: 1675115596722 + 180*60*1000},
|
||||
},
|
||||
},
|
||||
|
||||
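
Note: the tests above swap the old inmemory.New(&inmemory.Options{...}) helper for the providers under pkg/cache. The construction pattern below is copied from the diff itself; the package paths and the cache.Memory / cache.Config fields shown are the ones visible in the diff, and anything beyond that (including the exact interface the returned value satisfies) is assumed.

package querier

import (
	"context"
	"testing"
	"time"

	"github.com/SigNoz/signoz/pkg/cache"
	"github.com/SigNoz/signoz/pkg/cache/memorycache"
	"github.com/SigNoz/signoz/pkg/factory/factorytest"
)

// TestNewMemoryCache builds the in-memory cache the way the updated querier
// tests do, replacing the old query-service/cache/inmemory helper.
func TestNewMemoryCache(t *testing.T) {
	opts := cache.Memory{
		TTL:             5 * time.Minute,
		CleanupInterval: 10 * time.Minute,
	}
	c, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
	if err != nil {
		t.Fatalf("error initialising cache: %v", err)
	}
	_ = c // passed as QuerierOptions{Cache: c, ...} in the real tests
}
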
@@ -13,7 +13,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/common"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/querycache"
|
||||
"github.com/SigNoz/signoz/pkg/types/querybuildertypes"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
@@ -106,10 +106,10 @@ func (q *querier) runBuilderQuery(
|
||||
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: series}
|
||||
return
|
||||
}
|
||||
misses := q.queryCache.FindMissingTimeRangesV2(start, end, builderQuery.StepInterval, cacheKeys[queryName])
|
||||
misses := q.queryCache.FindMissingTimeRangesV2(ctx, start, end, builderQuery.StepInterval, cacheKeys[queryName])
|
||||
zap.L().Info("cache misses for logs query", zap.Any("misses", misses))
|
||||
missedSeries := make([]querycache.CachedSeriesData, 0)
|
||||
filteredMissedSeries := make([]querycache.CachedSeriesData, 0)
|
||||
missedSeries := make([]*querybuildertypes.SeriesData, 0)
|
||||
filteredMissedSeries := make([]*querybuildertypes.SeriesData, 0)
|
||||
for _, miss := range misses {
|
||||
query, err = prepareLogsQuery(ctx, miss.Start, miss.End, builderQuery, params)
|
||||
if err != nil {
|
||||
@@ -132,7 +132,7 @@ func (q *querier) runBuilderQuery(
|
||||
// making sure that empty range doesn't doesn't enter the cache
|
||||
// empty results from filteredSeries means data was filtered out, but empty series means actual empty data
|
||||
if len(filteredSeries) > 0 || len(series) == 0 {
|
||||
filteredMissedSeries = append(filteredMissedSeries, querycache.CachedSeriesData{
|
||||
filteredMissedSeries = append(filteredMissedSeries, &querybuildertypes.SeriesData{
|
||||
Data: filteredSeries,
|
||||
Start: startTime,
|
||||
End: endTime,
|
||||
@@ -140,17 +140,17 @@ func (q *querier) runBuilderQuery(
|
||||
}
|
||||
|
||||
// for the actual response
|
||||
missedSeries = append(missedSeries, querycache.CachedSeriesData{
|
||||
missedSeries = append(missedSeries, &querybuildertypes.SeriesData{
|
||||
Data: series,
|
||||
Start: miss.Start,
|
||||
End: miss.End,
|
||||
})
|
||||
}
|
||||
|
||||
filteredMergedSeries := q.queryCache.MergeWithCachedSeriesDataV2(cacheKeys[queryName], filteredMissedSeries)
|
||||
q.queryCache.StoreSeriesInCache(cacheKeys[queryName], filteredMergedSeries)
|
||||
filteredMergedSeries := q.queryCache.MergeWithCachedSeriesDataV2(ctx, cacheKeys[queryName], filteredMissedSeries)
|
||||
q.queryCache.StoreSeriesInCache(ctx, cacheKeys[queryName], filteredMergedSeries)
|
||||
|
||||
mergedSeries := q.queryCache.MergeWithCachedSeriesDataV2(cacheKeys[queryName], missedSeries)
|
||||
mergedSeries := q.queryCache.MergeWithCachedSeriesDataV2(ctx, cacheKeys[queryName], missedSeries)
|
||||
|
||||
resultSeries := common.GetSeriesFromCachedDataV2(mergedSeries, start, end, builderQuery.StepInterval)
|
||||
|
||||
@@ -238,9 +238,9 @@ func (q *querier) runBuilderQuery(
|
||||
return
|
||||
}
|
||||
|
||||
misses := q.queryCache.FindMissingTimeRanges(start, end, builderQuery.StepInterval, cacheKeys[queryName])
|
||||
misses := q.queryCache.FindMissingTimeRanges(ctx, start, end, builderQuery.StepInterval, cacheKeys[queryName])
|
||||
zap.L().Info("cache misses for metrics query", zap.Any("misses", misses))
|
||||
missedSeries := make([]querycache.CachedSeriesData, 0)
|
||||
missedSeries := make([]*querybuildertypes.SeriesData, 0)
|
||||
for _, miss := range misses {
|
||||
query, err := metricsV4.PrepareMetricQuery(
|
||||
miss.Start,
|
||||
@@ -269,13 +269,13 @@ func (q *querier) runBuilderQuery(
|
||||
}
|
||||
return
|
||||
}
|
||||
missedSeries = append(missedSeries, querycache.CachedSeriesData{
|
||||
missedSeries = append(missedSeries, &querybuildertypes.SeriesData{
|
||||
Data: series,
|
||||
Start: miss.Start,
|
||||
End: miss.End,
|
||||
})
|
||||
}
|
||||
mergedSeries := q.queryCache.MergeWithCachedSeriesData(cacheKeys[queryName], missedSeries)
|
||||
mergedSeries := q.queryCache.MergeWithCachedSeriesData(ctx, cacheKeys[queryName], missedSeries)
|
||||
|
||||
resultSeries := common.GetSeriesFromCachedData(mergedSeries, start, end)
|
||||
|
||||
|
||||
@@ -6,6 +6,7 @@ import (
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
logsV4 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v4"
|
||||
metricsV4 "github.com/SigNoz/signoz/pkg/query-service/app/metrics/v4"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
@@ -13,10 +14,9 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/common"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
chErrors "github.com/SigNoz/signoz/pkg/query-service/errors"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/querycache"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||
"github.com/SigNoz/signoz/pkg/types/querybuildertypes"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
@@ -68,7 +68,7 @@ func NewQuerier(opts QuerierOptions) interfaces.Querier {
|
||||
logsQueryBuilder := logsV4.PrepareLogsQuery
|
||||
tracesQueryBuilder := tracesV4.PrepareTracesQuery
|
||||
|
||||
qc := querycache.NewQueryCache(querycache.WithCache(opts.Cache), querycache.WithFluxInterval(opts.FluxInterval))
|
||||
qc := querybuildertypes.NewQueryCache(querybuildertypes.WithCache(opts.Cache), querybuildertypes.WithFluxInterval(opts.FluxInterval))
|
||||
|
||||
return &querier{
|
||||
cache: opts.Cache,
|
||||
@@ -212,9 +212,9 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam
|
||||
channelResults <- channelResult{Err: err, Name: queryName, Query: query.Query, Series: series}
|
||||
return
|
||||
}
|
||||
misses := q.queryCache.FindMissingTimeRanges(params.Start, params.End, params.Step, cacheKey)
|
||||
misses := q.queryCache.FindMissingTimeRanges(ctx, params.Start, params.End, params.Step, cacheKey)
|
||||
zap.L().Info("cache misses for metrics prom query", zap.Any("misses", misses))
|
||||
missedSeries := make([]querycache.CachedSeriesData, 0)
|
||||
missedSeries := make([]*querybuildertypes.SeriesData, 0)
|
||||
for _, miss := range misses {
|
||||
query := metricsV4.BuildPromQuery(promQuery, params.Step, miss.Start, miss.End)
|
||||
series, err := q.execPromQuery(ctx, query)
|
||||
@@ -222,13 +222,13 @@ func (q *querier) runPromQueries(ctx context.Context, params *v3.QueryRangeParam
|
||||
channelResults <- channelResult{Err: err, Name: queryName, Query: query.Query, Series: nil}
|
||||
return
|
||||
}
|
||||
missedSeries = append(missedSeries, querycache.CachedSeriesData{
|
||||
missedSeries = append(missedSeries, &querybuildertypes.SeriesData{
|
||||
Data: series,
|
||||
Start: miss.Start,
|
||||
End: miss.End,
|
||||
})
|
||||
}
|
||||
mergedSeries := q.queryCache.MergeWithCachedSeriesData(cacheKey, missedSeries)
|
||||
mergedSeries := q.queryCache.MergeWithCachedSeriesData(ctx, cacheKey, missedSeries)
|
||||
resultSeries := common.GetSeriesFromCachedData(mergedSeries, params.Start, params.End)
|
||||
channelResults <- channelResult{Err: nil, Name: queryName, Query: promQuery.Query, Series: resultSeries}
|
||||
}(queryName, promQuery)
|
||||
|
||||
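
Note: both querier versions now cache []*querybuildertypes.SeriesData wrapped in a CachedSeriesData envelope instead of the old []querycache.CachedSeriesData slice that was JSON-marshalled by hand. Based on how the diff constructs these values, the shapes are roughly as sketched below; any field not shown in the diff is an assumption.

package main

import "fmt"

// Series stands in for v3.Series; only a label map is sketched here.
type Series struct {
	Labels map[string]string
}

// SeriesData is one cached window of series, as built in runBuilderQuery.
type SeriesData struct {
	Start int64
	End   int64
	Data  []*Series
}

// CachedSeriesData is the envelope the tests now store directly in the cache:
// one value holding every cached window for a single cache key.
type CachedSeriesData struct {
	Series []*SeriesData
}

// MissInterval is one uncovered [Start, End) range from FindMissingTimeRanges.
type MissInterval struct {
	Start, End int64
}

func main() {
	window := &SeriesData{Start: 1675115596722, End: 1675115596722 + 60*60*1000}
	cached := CachedSeriesData{Series: []*SeriesData{window}}
	fmt.Println(len(cached.Series), "cached window(s)")
}
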
@@ -2,7 +2,6 @@ package v2
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math"
|
||||
"strings"
|
||||
@@ -10,18 +9,20 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/DATA-DOG/go-sqlmock"
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/cache/memorycache"
|
||||
"github.com/SigNoz/signoz/pkg/factory/factorytest"
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||
"github.com/SigNoz/signoz/pkg/prometheus/prometheustest"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
tracesV3 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache/inmemory"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/querycache"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
|
||||
"github.com/SigNoz/signoz/pkg/types/querybuildertypes"
|
||||
cmock "github.com/srikanthccv/ClickHouse-go-mock"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
@@ -63,7 +64,7 @@ func TestV2FindMissingTimeRangesZeroFreshNess(t *testing.T) {
|
||||
requestedEnd int64 // in milliseconds
|
||||
requestedStep int64 // in seconds
|
||||
cachedSeries []*v3.Series
|
||||
expectedMiss []querycache.MissInterval
|
||||
expectedMiss []*querybuildertypes.MissInterval
|
||||
replaceCachedData bool
|
||||
}{
|
||||
{
|
||||
@@ -88,7 +89,7 @@ func TestV2FindMissingTimeRangesZeroFreshNess(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*querybuildertypes.MissInterval{
|
||||
{
|
||||
Start: 1675115596722,
|
||||
End: 1675115596722 + 60*60*1000,
|
||||
@@ -129,7 +130,7 @@ func TestV2FindMissingTimeRangesZeroFreshNess(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{},
|
||||
expectedMiss: []*querybuildertypes.MissInterval{},
|
||||
},
|
||||
{
|
||||
name: "cached time range is a left overlap of the requested time range",
|
||||
@@ -157,7 +158,7 @@ func TestV2FindMissingTimeRangesZeroFreshNess(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*querybuildertypes.MissInterval{
|
||||
{
|
||||
Start: 1675115596722 + 120*60*1000,
|
||||
End: 1675115596722 + 180*60*1000,
|
||||
@@ -190,7 +191,7 @@ func TestV2FindMissingTimeRangesZeroFreshNess(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*querybuildertypes.MissInterval{
|
||||
{
|
||||
Start: 1675115596722,
|
||||
End: 1675115596722 + 60*60*1000,
|
||||
@@ -223,7 +224,7 @@ func TestV2FindMissingTimeRangesZeroFreshNess(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*querybuildertypes.MissInterval{
|
||||
{
|
||||
Start: 1675115596722,
|
||||
End: 1675115596722 + 180*60*1000,
|
||||
@@ -232,29 +233,35 @@ func TestV2FindMissingTimeRangesZeroFreshNess(t *testing.T) {
|
||||
replaceCachedData: true,
|
||||
},
|
||||
}
|
||||
opts := cache.Memory{
|
||||
TTL: 5 * time.Minute,
|
||||
CleanupInterval: 10 * time.Minute,
|
||||
}
|
||||
c, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
|
||||
if err != nil {
|
||||
t.Errorf("error initialising cache: %v", err)
|
||||
}
|
||||
|
||||
c := inmemory.New(&inmemory.Options{TTL: 5 * time.Minute, CleanupInterval: 10 * time.Minute})
|
||||
|
||||
qc := querycache.NewQueryCache(querycache.WithCache(c))
|
||||
qc := querybuildertypes.NewQueryCache(querybuildertypes.WithCache(c))
|
||||
|
||||
for idx, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
cacheKey := fmt.Sprintf("test-cache-key-%d", idx)
|
||||
cachedData := &querycache.CachedSeriesData{
|
||||
cachedData := &querybuildertypes.SeriesData{
|
||||
Start: minTimestamp(tc.cachedSeries),
|
||||
End: maxTimestamp(tc.cachedSeries),
|
||||
Data: tc.cachedSeries,
|
||||
}
|
||||
jsonData, err := json.Marshal([]*querycache.CachedSeriesData{cachedData})
|
||||
data := querybuildertypes.CachedSeriesData{Series: []*querybuildertypes.SeriesData{cachedData}}
|
||||
if err != nil {
|
||||
t.Errorf("error marshalling cached data: %v", err)
|
||||
}
|
||||
err = c.Store(cacheKey, jsonData, 5*time.Minute)
|
||||
err = c.Store(context.Background(), cacheKey, &data, 5*time.Minute)
|
||||
if err != nil {
|
||||
t.Errorf("error storing cached data: %v", err)
|
||||
}
|
||||
|
||||
misses := qc.FindMissingTimeRanges(tc.requestedStart, tc.requestedEnd, tc.requestedStep, cacheKey)
|
||||
misses := qc.FindMissingTimeRanges(context.Background(), tc.requestedStart, tc.requestedEnd, tc.requestedStep, cacheKey)
|
||||
if len(misses) != len(tc.expectedMiss) {
|
||||
t.Errorf("expected %d misses, got %d", len(tc.expectedMiss), len(misses))
|
||||
}
|
||||
@@ -280,7 +287,7 @@ func TestV2FindMissingTimeRangesWithFluxInterval(t *testing.T) {
|
||||
requestedStep int64
|
||||
cachedSeries []*v3.Series
|
||||
fluxInterval time.Duration
|
||||
expectedMiss []querycache.MissInterval
|
||||
expectedMiss []*querybuildertypes.MissInterval
|
||||
}{
|
||||
{
|
||||
name: "cached time range is a subset of the requested time range",
|
||||
@@ -305,7 +312,7 @@ func TestV2FindMissingTimeRangesWithFluxInterval(t *testing.T) {
|
||||
},
|
||||
},
|
||||
fluxInterval: 5 * time.Minute,
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*querybuildertypes.MissInterval{
|
||||
{
|
||||
Start: 1675115596722,
|
||||
End: 1675115596722 + 60*60*1000,
|
||||
@@ -347,7 +354,7 @@ func TestV2FindMissingTimeRangesWithFluxInterval(t *testing.T) {
|
||||
},
|
||||
},
|
||||
fluxInterval: 5 * time.Minute,
|
||||
expectedMiss: []querycache.MissInterval{},
|
||||
expectedMiss: []*querybuildertypes.MissInterval{},
|
||||
},
|
||||
{
|
||||
name: "cache time range is a left overlap of the requested time range",
|
||||
@@ -376,7 +383,7 @@ func TestV2FindMissingTimeRangesWithFluxInterval(t *testing.T) {
|
||||
},
|
||||
},
|
||||
fluxInterval: 5 * time.Minute,
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*querybuildertypes.MissInterval{
|
||||
{
|
||||
Start: 1675115596722 + 120*60*1000,
|
||||
End: 1675115596722 + 180*60*1000,
|
||||
@@ -410,7 +417,7 @@ func TestV2FindMissingTimeRangesWithFluxInterval(t *testing.T) {
|
||||
},
|
||||
},
|
||||
fluxInterval: 5 * time.Minute,
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*querybuildertypes.MissInterval{
|
||||
{
|
||||
Start: 1675115596722,
|
||||
End: 1675115596722 + 60*60*1000,
|
||||
@@ -444,7 +451,7 @@ func TestV2FindMissingTimeRangesWithFluxInterval(t *testing.T) {
|
||||
},
|
||||
},
|
||||
fluxInterval: 5 * time.Minute,
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*querybuildertypes.MissInterval{
|
||||
{
|
||||
Start: 1675115596722,
|
||||
End: 1675115596722 + 180*60*1000,
|
||||
@@ -453,29 +460,35 @@ func TestV2FindMissingTimeRangesWithFluxInterval(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
c := inmemory.New(&inmemory.Options{TTL: 5 * time.Minute, CleanupInterval: 10 * time.Minute})
|
||||
|
||||
qc := querycache.NewQueryCache(querycache.WithCache(c))
|
||||
opts := cache.Memory{
|
||||
TTL: 5 * time.Minute,
|
||||
CleanupInterval: 10 * time.Minute,
|
||||
}
|
||||
c, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
|
||||
if err != nil {
|
||||
t.Errorf("error initialising cache: %v", err)
|
||||
}
|
||||
qc := querybuildertypes.NewQueryCache(querybuildertypes.WithCache(c))
|
||||
|
||||
for idx, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
cacheKey := fmt.Sprintf("test-cache-key-%d", idx)
|
||||
cachedData := &querycache.CachedSeriesData{
|
||||
cachedData := &querybuildertypes.SeriesData{
|
||||
Start: minTimestamp(tc.cachedSeries),
|
||||
End: maxTimestamp(tc.cachedSeries),
|
||||
Data: tc.cachedSeries,
|
||||
}
|
||||
jsonData, err := json.Marshal([]*querycache.CachedSeriesData{cachedData})
|
||||
data := querybuildertypes.CachedSeriesData{Series: []*querybuildertypes.SeriesData{cachedData}}
|
||||
if err != nil {
|
||||
t.Errorf("error marshalling cached data: %v", err)
|
||||
return
|
||||
}
|
||||
err = c.Store(cacheKey, jsonData, 5*time.Minute)
|
||||
err = c.Store(context.Background(), cacheKey, &data, 5*time.Minute)
|
||||
if err != nil {
|
||||
t.Errorf("error storing cached data: %v", err)
|
||||
return
|
||||
}
|
||||
misses := qc.FindMissingTimeRanges(tc.requestedStart, tc.requestedEnd, tc.requestedStep, cacheKey)
|
||||
misses := qc.FindMissingTimeRanges(context.Background(), tc.requestedStart, tc.requestedEnd, tc.requestedStep, cacheKey)
|
||||
if len(misses) != len(tc.expectedMiss) {
|
||||
t.Errorf("expected %d misses, got %d", len(tc.expectedMiss), len(misses))
|
||||
}
|
||||
@@ -634,9 +647,17 @@ func TestV2QueryRangePanelGraph(t *testing.T) {
|
||||
},
|
||||
},
|
||||
}
|
||||
cache := inmemory.New(&inmemory.Options{TTL: 5 * time.Minute, CleanupInterval: 10 * time.Minute})
|
||||
cacheOpts := cache.Memory{
|
||||
TTL: 5 * time.Minute,
|
||||
CleanupInterval: 10 * time.Minute,
|
||||
}
|
||||
c, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cacheOpts})
|
||||
if err != nil {
|
||||
t.Errorf("error initialising cache: %v", err)
|
||||
}
|
||||
|
||||
opts := QuerierOptions{
|
||||
Cache: cache,
|
||||
Cache: c,
|
||||
Reader: nil,
|
||||
FluxInterval: 5 * time.Minute,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
@@ -783,9 +804,17 @@ func TestV2QueryRangeValueType(t *testing.T) {
|
||||
},
|
||||
},
|
||||
}
|
||||
cache := inmemory.New(&inmemory.Options{TTL: 60 * time.Minute, CleanupInterval: 10 * time.Minute})
|
||||
|
||||
cacheopts := cache.Memory{
|
||||
TTL: 5 * time.Minute,
|
||||
CleanupInterval: 10 * time.Minute,
|
||||
}
|
||||
c, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cacheopts})
|
||||
if err != nil {
|
||||
t.Errorf("error initialising cache: %v", err)
|
||||
}
|
||||
opts := QuerierOptions{
|
||||
Cache: cache,
|
||||
Cache: c,
|
||||
Reader: nil,
|
||||
FluxInterval: 5 * time.Minute,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
@@ -944,9 +973,16 @@ func TestV2QueryRangeTimeShiftWithCache(t *testing.T) {
|
||||
},
|
||||
},
|
||||
}
|
||||
cache := inmemory.New(&inmemory.Options{TTL: 60 * time.Minute, CleanupInterval: 10 * time.Minute})
|
||||
cacheopts := cache.Memory{
|
||||
TTL: 5 * time.Minute,
|
||||
CleanupInterval: 10 * time.Minute,
|
||||
}
|
||||
c, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cacheopts})
|
||||
if err != nil {
|
||||
t.Errorf("error initialising cache: %v", err)
|
||||
}
|
||||
opts := QuerierOptions{
|
||||
Cache: cache,
|
||||
Cache: c,
|
||||
Reader: nil,
|
||||
FluxInterval: 5 * time.Minute,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
@@ -1047,9 +1083,16 @@ func TestV2QueryRangeTimeShiftWithLimitAndCache(t *testing.T) {
|
||||
},
|
||||
},
|
||||
}
|
||||
cache := inmemory.New(&inmemory.Options{TTL: 60 * time.Minute, CleanupInterval: 10 * time.Minute})
|
||||
cacheopts := cache.Memory{
|
||||
TTL: 5 * time.Minute,
|
||||
CleanupInterval: 10 * time.Minute,
|
||||
}
|
||||
c, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cacheopts})
|
||||
if err != nil {
|
||||
t.Errorf("error initialising cache: %v", err)
|
||||
}
|
||||
opts := QuerierOptions{
|
||||
Cache: cache,
|
||||
Cache: c,
|
||||
Reader: nil,
|
||||
FluxInterval: 5 * time.Minute,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
@@ -1121,9 +1164,16 @@ func TestV2QueryRangeValueTypePromQL(t *testing.T) {
|
||||
},
|
||||
},
|
||||
}
|
||||
cache := inmemory.New(&inmemory.Options{TTL: 60 * time.Minute, CleanupInterval: 10 * time.Minute})
|
||||
cacheopts := cache.Memory{
|
||||
TTL: 5 * time.Minute,
|
||||
CleanupInterval: 10 * time.Minute,
|
||||
}
|
||||
c, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cacheopts})
|
||||
if err != nil {
|
||||
t.Errorf("error initialising cache: %v", err)
|
||||
}
|
||||
opts := QuerierOptions{
|
||||
Cache: cache,
|
||||
Cache: c,
|
||||
Reader: nil,
|
||||
FluxInterval: 5 * time.Minute,
|
||||
KeyGenerator: queryBuilder.NewKeyGenerator(),
|
||||
@@ -1148,17 +1198,17 @@ func TestV2QueryRangeValueTypePromQL(t *testing.T) {
|
||||
|
||||
expectedQueryAndTimeRanges := []struct {
|
||||
query string
|
||||
ranges []querycache.MissInterval
|
||||
ranges []*querybuildertypes.MissInterval
|
||||
}{
|
||||
{
|
||||
query: "signoz_calls_total",
|
||||
ranges: []querycache.MissInterval{
|
||||
ranges: []*querybuildertypes.MissInterval{
|
||||
{Start: 1675115596722, End: 1675115596722 + 120*60*1000},
|
||||
},
|
||||
},
|
||||
{
|
||||
query: "signoz_latency_bucket",
|
||||
ranges: []querycache.MissInterval{
|
||||
ranges: []*querybuildertypes.MissInterval{
|
||||
{Start: 1675115596722 + 60*60*1000, End: 1675115596722 + 180*60*1000},
|
||||
},
|
||||
},
|
||||
|
||||
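
Note: the table-driven tests above assert only on the number and bounds of the returned miss intervals. A tiny helper in that spirit is sketched below; it is hypothetical and not part of the diff, which performs the comparison inline.

package main

import "fmt"

type MissInterval struct{ Start, End int64 }

// equalMisses reports whether two miss-interval slices match exactly,
// the same check the querier tests express with len() and index comparisons.
func equalMisses(got, want []MissInterval) bool {
	if len(got) != len(want) {
		return false
	}
	for i := range got {
		if got[i] != want[i] {
			return false
		}
	}
	return true
}

func main() {
	got := []MissInterval{{Start: 0, End: 100}}
	want := []MissInterval{{Start: 0, End: 100}}
	fmt.Println(equalMisses(got, want)) // true
}
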
@@ -5,8 +5,8 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/govaluate"
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
metricsV3 "github.com/SigNoz/signoz/pkg/query-service/app/metrics/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
|
||||
@@ -13,6 +13,7 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/apis/fields"
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
|
||||
@@ -32,7 +33,6 @@ import (
|
||||
"github.com/soheilhy/cmux"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/explorer"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/dao"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/featureManager"
|
||||
@@ -49,7 +49,6 @@ type ServerOptions struct {
|
||||
HTTPHostPort string
|
||||
PrivateHostPort string
|
||||
PreferSpanMetrics bool
|
||||
CacheConfigPath string
|
||||
FluxInterval string
|
||||
FluxIntervalForTraceDetail string
|
||||
Cluster string
|
||||
@@ -112,19 +111,10 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
serverOptions.SigNoz.Cache,
|
||||
)
|
||||
|
||||
var c cache.Cache
|
||||
if serverOptions.CacheConfigPath != "" {
|
||||
cacheOpts, err := cache.LoadFromYAMLCacheConfigFile(serverOptions.CacheConfigPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
c = cache.NewCache(cacheOpts)
|
||||
}
|
||||
|
||||
rm, err := makeRulesManager(
|
||||
serverOptions.SigNoz.SQLStore.SQLxDB(),
|
||||
reader,
|
||||
c,
|
||||
serverOptions.SigNoz.Cache,
|
||||
serverOptions.SigNoz.SQLStore,
|
||||
serverOptions.SigNoz.TelemetryStore,
|
||||
serverOptions.SigNoz.Prometheus,
|
||||
@@ -165,7 +155,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
IntegrationsController: integrationsController,
|
||||
CloudIntegrationsController: cloudIntegrationsController,
|
||||
LogsParsingPipelineController: logParsingPipelineController,
|
||||
Cache: c,
|
||||
FluxInterval: fluxInterval,
|
||||
JWT: serverOptions.Jwt,
|
||||
AlertmanagerAPI: alertmanager.NewAPI(serverOptions.SigNoz.Alertmanager),
|
||||
|
||||
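
Note: with the server.go changes above, the query service no longer loads a separate cache YAML through CacheConfigPath; the cache arrives pre-built on serverOptions.SigNoz and is handed straight to the rules manager and API handler, and the legacy vendored cache package below is deleted. The sketch that follows only illustrates that dependency-injection direction with minimal local types; it is not the SigNoz API.

package main

import "fmt"

// Cache is a minimal stand-in for the shared pkg/cache dependency.
type Cache interface {
	Name() string
}

type memoryCache struct{}

func (memoryCache) Name() string { return "memory" }

// ServerOptions now receives an already-constructed cache instead of a
// CacheConfigPath that each server would load and parse on its own.
type ServerOptions struct {
	Cache Cache // pre-built dependency, injected once at startup
}

func NewServer(opts ServerOptions) error {
	if opts.Cache == nil {
		return fmt.Errorf("cache dependency is required")
	}
	fmt.Println("rules manager and API handler share cache:", opts.Cache.Name())
	return nil
}

func main() {
	_ = NewServer(ServerOptions{Cache: memoryCache{}})
}
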
69  pkg/query-service/cache/cache.go (vendored)
@@ -1,69 +0,0 @@
|
||||
package cache
|
||||
|
||||
import (
|
||||
"os"
|
||||
"time"
|
||||
|
||||
inmemory "github.com/SigNoz/signoz/pkg/query-service/cache/inmemory"
|
||||
redis "github.com/SigNoz/signoz/pkg/query-service/cache/redis"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache/status"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
type Options struct {
|
||||
Name string `yaml:"-"`
|
||||
Provider string `yaml:"provider"`
|
||||
Redis *redis.Options `yaml:"redis,omitempty"`
|
||||
InMemory *inmemory.Options `yaml:"inmemory,omitempty"`
|
||||
}
|
||||
|
||||
// Cache is the interface for the storage backend
|
||||
type Cache interface {
|
||||
Connect() error
|
||||
Store(cacheKey string, data []byte, ttl time.Duration) error
|
||||
Retrieve(cacheKey string, allowExpired bool) ([]byte, status.RetrieveStatus, error)
|
||||
SetTTL(cacheKey string, ttl time.Duration)
|
||||
Remove(cacheKey string)
|
||||
BulkRemove(cacheKeys []string)
|
||||
Close() error
|
||||
}
|
||||
|
||||
// KeyGenerator is the interface for the key generator
|
||||
// The key generator is used to generate the cache keys for the cache entries
|
||||
type KeyGenerator interface {
|
||||
// GenerateKeys generates the cache keys for the given query range params
|
||||
// The keys are returned as a map where the key is the query name and the value is the cache key
|
||||
GenerateKeys(*v3.QueryRangeParamsV3) map[string]string
|
||||
}
|
||||
|
||||
// LoadFromYAMLCacheConfig loads the cache options from the given YAML config bytes
|
||||
func LoadFromYAMLCacheConfig(yamlConfig []byte) (*Options, error) {
|
||||
var options Options
|
||||
err := yaml.Unmarshal(yamlConfig, &options)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &options, nil
|
||||
}
|
||||
|
||||
// LoadFromYAMLCacheConfigFile loads the cache options from the given YAML config file
|
||||
func LoadFromYAMLCacheConfigFile(configFile string) (*Options, error) {
|
||||
bytes, err := os.ReadFile(configFile)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return LoadFromYAMLCacheConfig(bytes)
|
||||
}
|
||||
|
||||
// NewCache creates a new cache based on the given options
|
||||
func NewCache(options *Options) Cache {
|
||||
switch options.Provider {
|
||||
case "redis":
|
||||
return redis.New(options.Redis)
|
||||
case "inmemory":
|
||||
return inmemory.New(options.InMemory)
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
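A minimal sketch of how the facade deleted above was used, built only from the interface and constructors in that file; the YAML literal and cache key are illustrative:

package main

import (
	"fmt"
	"time"

	qscache "github.com/SigNoz/signoz/pkg/query-service/cache"
)

func main() {
	// Build options the same way LoadFromYAMLCacheConfigFile did, but inline.
	opts, err := qscache.LoadFromYAMLCacheConfig([]byte("provider: inmemory\n"))
	if err != nil {
		panic(err)
	}

	c := qscache.NewCache(opts) // returns nil for unknown providers
	if err := c.Connect(); err != nil {
		panic(err)
	}
	defer c.Close()

	// Values were raw bytes; callers did their own (un)marshalling.
	_ = c.Store("query:abc", []byte(`{"series":[]}`), 5*time.Minute)

	data, st, _ := c.Retrieve("query:abc", false)
	fmt.Println(st.String(), string(data))
}

The replacement in pkg/cache, used throughout the rest of this diff, takes a context and typed values instead of raw bytes.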
52 pkg/query-service/cache/cache_test.go (vendored)
@@ -1,52 +0,0 @@
|
||||
package cache
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestNewCacheUnKnownProvider(t *testing.T) {
|
||||
c := NewCache(&Options{
|
||||
Name: "test",
|
||||
Provider: "unknown",
|
||||
})
|
||||
|
||||
if c != nil {
|
||||
t.Fatalf("expected nil, got %v", c)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNewCacheInMemory(t *testing.T) {
|
||||
c := NewCache(&Options{
|
||||
Name: "test",
|
||||
Provider: "inmemory",
|
||||
})
|
||||
|
||||
if c == nil {
|
||||
t.Fatalf("expected non-nil, got nil")
|
||||
}
|
||||
}
|
||||
|
||||
func TestNewCacheRedis(t *testing.T) {
|
||||
c := NewCache(&Options{
|
||||
Name: "test",
|
||||
Provider: "redis",
|
||||
})
|
||||
|
||||
if c == nil {
|
||||
t.Fatalf("expected non-nil, got nil")
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoadFromYAMLCacheConfig(t *testing.T) {
|
||||
_, err := LoadFromYAMLCacheConfig([]byte(`
|
||||
provider: inmemory
|
||||
`))
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoadFromYAMLCacheConfigFile(t *testing.T) {
|
||||
_, err := LoadFromYAMLCacheConfigFile("testdata/cache.yaml")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
}
|
||||
73 pkg/query-service/cache/inmemory/cache.go (vendored)
@@ -1,73 +0,0 @@
|
||||
package inmemory
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache/status"
|
||||
go_cache "github.com/patrickmn/go-cache"
|
||||
)
|
||||
|
||||
// cache implements the Cache interface
|
||||
type cache struct {
|
||||
cc *go_cache.Cache
|
||||
}
|
||||
|
||||
// New creates a new in-memory cache
|
||||
func New(opts *Options) *cache {
|
||||
if opts == nil {
|
||||
opts = defaultOptions()
|
||||
}
|
||||
return &cache{cc: go_cache.New(opts.TTL, opts.CleanupInterval)}
|
||||
}
|
||||
|
||||
// Connect does nothing
|
||||
func (c *cache) Connect() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Store stores the data in the cache
|
||||
func (c *cache) Store(cacheKey string, data []byte, ttl time.Duration) error {
|
||||
c.cc.Set(cacheKey, data, ttl)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Retrieve retrieves the data from the cache
|
||||
func (c *cache) Retrieve(cacheKey string, allowExpired bool) ([]byte, status.RetrieveStatus, error) {
|
||||
data, found := c.cc.Get(cacheKey)
|
||||
if !found {
|
||||
return nil, status.RetrieveStatusKeyMiss, nil
|
||||
}
|
||||
|
||||
return data.([]byte), status.RetrieveStatusHit, nil
|
||||
}
|
||||
|
||||
// SetTTL sets the TTL for the cache entry
|
||||
func (c *cache) SetTTL(cacheKey string, ttl time.Duration) {
|
||||
item, found := c.cc.Get(cacheKey)
|
||||
if !found {
|
||||
return
|
||||
}
|
||||
_ = c.cc.Replace(cacheKey, item, ttl)
|
||||
}
|
||||
|
||||
// Remove removes the cache entry
|
||||
func (c *cache) Remove(cacheKey string) {
|
||||
c.cc.Delete(cacheKey)
|
||||
}
|
||||
|
||||
// BulkRemove removes the cache entries
|
||||
func (c *cache) BulkRemove(cacheKeys []string) {
|
||||
for _, cacheKey := range cacheKeys {
|
||||
c.cc.Delete(cacheKey)
|
||||
}
|
||||
}
|
||||
|
||||
// Close does nothing
|
||||
func (c *cache) Close() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Configuration returns the cache configuration
|
||||
func (c *cache) Configuration() *Options {
|
||||
return nil
|
||||
}
|
||||
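The provider deleted above is a thin wrapper around patrickmn/go-cache. A small sketch of the behaviour it implemented (per-entry TTL, background cleanup, Replace-based TTL updates), written directly against that library; the key and durations are placeholders:

package main

import (
	"fmt"
	"time"

	go_cache "github.com/patrickmn/go-cache"
)

func main() {
	// Mirrors inmemory.New: one store with a default TTL and a janitor interval.
	cc := go_cache.New(go_cache.NoExpiration, time.Minute)

	// Store/Retrieve mapped almost directly onto Set/Get.
	cc.Set("key", []byte("value"), 10*time.Second)

	if v, found := cc.Get("key"); found {
		fmt.Println(string(v.([]byte))) // "value"
	}

	// SetTTL was implemented with Replace, keeping the stored value.
	if v, found := cc.Get("key"); found {
		_ = cc.Replace("key", v, 30*time.Second)
	}
}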
102 pkg/query-service/cache/inmemory/cache_test.go (vendored)
@@ -1,102 +0,0 @@
|
||||
package inmemory
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache/status"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
// TestNew tests the New function
|
||||
func TestNew(t *testing.T) {
|
||||
opts := &Options{
|
||||
TTL: 10 * time.Second,
|
||||
CleanupInterval: 10 * time.Second,
|
||||
}
|
||||
c := New(opts)
|
||||
assert.NotNil(t, c)
|
||||
assert.NotNil(t, c.cc)
|
||||
}
|
||||
|
||||
// TestConnect tests the Connect function
|
||||
func TestConnect(t *testing.T) {
|
||||
c := New(nil)
|
||||
assert.NoError(t, c.Connect())
|
||||
}
|
||||
|
||||
// TestStore tests the Store function
|
||||
func TestStore(t *testing.T) {
|
||||
c := New(nil)
|
||||
assert.NoError(t, c.Store("key", []byte("value"), 10*time.Second))
|
||||
}
|
||||
|
||||
// TestRetrieve tests the Retrieve function
|
||||
func TestRetrieve(t *testing.T) {
|
||||
c := New(nil)
|
||||
assert.NoError(t, c.Store("key", []byte("value"), 10*time.Second))
|
||||
data, retrieveStatus, err := c.Retrieve("key", false)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, retrieveStatus, status.RetrieveStatusHit)
|
||||
assert.Equal(t, data, []byte("value"))
|
||||
}
|
||||
|
||||
// TestSetTTL tests the SetTTL function
|
||||
func TestSetTTL(t *testing.T) {
|
||||
c := New(&Options{TTL: 10 * time.Second, CleanupInterval: 1 * time.Second})
|
||||
assert.NoError(t, c.Store("key", []byte("value"), 2*time.Second))
|
||||
time.Sleep(3 * time.Second)
|
||||
data, retrieveStatus, err := c.Retrieve("key", false)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, retrieveStatus, status.RetrieveStatusKeyMiss)
|
||||
assert.Nil(t, data)
|
||||
|
||||
assert.NoError(t, c.Store("key", []byte("value"), 2*time.Second))
|
||||
c.SetTTL("key", 4*time.Second)
|
||||
time.Sleep(3 * time.Second)
|
||||
data, retrieveStatus, err = c.Retrieve("key", false)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, retrieveStatus, status.RetrieveStatusHit)
|
||||
assert.Equal(t, data, []byte("value"))
|
||||
}
|
||||
|
||||
// TestRemove tests the Remove function
|
||||
func TestRemove(t *testing.T) {
|
||||
c := New(nil)
|
||||
assert.NoError(t, c.Store("key", []byte("value"), 10*time.Second))
|
||||
c.Remove("key")
|
||||
|
||||
data, retrieveStatus, err := c.Retrieve("key", false)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, retrieveStatus, status.RetrieveStatusKeyMiss)
|
||||
assert.Nil(t, data)
|
||||
}
|
||||
|
||||
// TestBulkRemove tests the BulkRemove function
|
||||
func TestBulkRemove(t *testing.T) {
|
||||
c := New(nil)
|
||||
assert.NoError(t, c.Store("key1", []byte("value"), 10*time.Second))
|
||||
assert.NoError(t, c.Store("key2", []byte("value"), 10*time.Second))
|
||||
c.BulkRemove([]string{"key1", "key2"})
|
||||
|
||||
data, retrieveStatus, err := c.Retrieve("key1", false)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, retrieveStatus, status.RetrieveStatusKeyMiss)
|
||||
assert.Nil(t, data)
|
||||
|
||||
data, retrieveStatus, err = c.Retrieve("key2", false)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, retrieveStatus, status.RetrieveStatusKeyMiss)
|
||||
assert.Nil(t, data)
|
||||
}
|
||||
|
||||
// TestCache tests the cache
|
||||
func TestCache(t *testing.T) {
|
||||
c := New(nil)
|
||||
assert.NoError(t, c.Store("key", []byte("value"), 10*time.Second))
|
||||
data, retrieveStatus, err := c.Retrieve("key", false)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, retrieveStatus, status.RetrieveStatusHit)
|
||||
assert.Equal(t, data, []byte("value"))
|
||||
c.Remove("key")
|
||||
}
|
||||
23 pkg/query-service/cache/inmemory/options.go (vendored)
@@ -1,23 +0,0 @@
package inmemory

import (
	"time"

	go_cache "github.com/patrickmn/go-cache"
)

const (
	defaultTTL             = go_cache.NoExpiration
	defaultCleanupInterval = 1 * time.Minute
)

// Options holds the options for the in-memory cache
type Options struct {
	// TTL is the time to live for the cache entries
	TTL             time.Duration `yaml:"ttl,omitempty"`
	CleanupInterval time.Duration `yaml:"cleanupInterval,omitempty"`
}

func defaultOptions() *Options {
	return &Options{TTL: defaultTTL, CleanupInterval: defaultCleanupInterval}
}
24 pkg/query-service/cache/redis/options.go (vendored)
@@ -1,24 +0,0 @@
package redis

const (
	defaultHost     = "localhost"
	defaultPort     = 6379
	defaultPassword = ""
	defaultDB       = 0
)

type Options struct {
	Host     string `yaml:"host,omitempty"`
	Port     int    `yaml:"port,omitempty"`
	Password string `yaml:"password,omitempty"`
	DB       int    `yaml:"db,omitempty"`
}

func defaultOptions() *Options {
	return &Options{
		Host:     defaultHost,
		Port:     defaultPort,
		Password: defaultPassword,
		DB:       defaultDB,
	}
}
124 pkg/query-service/cache/redis/redis.go (vendored)
@@ -1,124 +0,0 @@
|
||||
package redis
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache/status"
|
||||
"github.com/go-redis/redis/v8"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
type cache struct {
|
||||
client *redis.Client
|
||||
opts *Options
|
||||
}
|
||||
|
||||
// New creates a new cache
|
||||
func New(opts *Options) *cache {
|
||||
if opts == nil {
|
||||
opts = defaultOptions()
|
||||
}
|
||||
return &cache{opts: opts}
|
||||
}
|
||||
|
||||
// WithClient creates a new cache with the given client
|
||||
func WithClient(client *redis.Client) *cache {
|
||||
return &cache{client: client}
|
||||
}
|
||||
|
||||
// Connect connects to the redis server
|
||||
func (c *cache) Connect() error {
|
||||
c.client = redis.NewClient(&redis.Options{
|
||||
Addr: fmt.Sprintf("%s:%d", c.opts.Host, c.opts.Port),
|
||||
Password: c.opts.Password,
|
||||
DB: c.opts.DB,
|
||||
})
|
||||
return nil
|
||||
}
|
||||
|
||||
// Store stores the data in the cache
|
||||
func (c *cache) Store(cacheKey string, data []byte, ttl time.Duration) error {
|
||||
return c.client.Set(context.Background(), cacheKey, data, ttl).Err()
|
||||
}
|
||||
|
||||
// Retrieve retrieves the data from the cache
|
||||
func (c *cache) Retrieve(cacheKey string, allowExpired bool) ([]byte, status.RetrieveStatus, error) {
|
||||
data, err := c.client.Get(context.Background(), cacheKey).Bytes()
|
||||
if err != nil {
|
||||
if errors.Is(err, redis.Nil) {
|
||||
return nil, status.RetrieveStatusKeyMiss, nil
|
||||
}
|
||||
return nil, status.RetrieveStatusError, err
|
||||
}
|
||||
return data, status.RetrieveStatusHit, nil
|
||||
}
|
||||
|
||||
// SetTTL sets the TTL for the cache entry
|
||||
func (c *cache) SetTTL(cacheKey string, ttl time.Duration) {
|
||||
err := c.client.Expire(context.Background(), cacheKey, ttl).Err()
|
||||
if err != nil {
|
||||
zap.L().Error("error setting TTL for cache key", zap.String("cacheKey", cacheKey), zap.Duration("ttl", ttl), zap.Error(err))
|
||||
}
|
||||
}
|
||||
|
||||
// Remove removes the cache entry
|
||||
func (c *cache) Remove(cacheKey string) {
|
||||
c.BulkRemove([]string{cacheKey})
|
||||
}
|
||||
|
||||
// BulkRemove removes the cache entries
|
||||
func (c *cache) BulkRemove(cacheKeys []string) {
|
||||
if err := c.client.Del(context.Background(), cacheKeys...).Err(); err != nil {
|
||||
zap.L().Error("error deleting cache keys", zap.Strings("cacheKeys", cacheKeys), zap.Error(err))
|
||||
}
|
||||
}
|
||||
|
||||
// Close closes the connection to the redis server
|
||||
func (c *cache) Close() error {
|
||||
return c.client.Close()
|
||||
}
|
||||
|
||||
// Ping pings the redis server
|
||||
func (c *cache) Ping() error {
|
||||
return c.client.Ping(context.Background()).Err()
|
||||
}
|
||||
|
||||
// GetClient returns the redis client
|
||||
func (c *cache) GetClient() *redis.Client {
|
||||
return c.client
|
||||
}
|
||||
|
||||
// GetOptions returns the options
|
||||
func (c *cache) GetOptions() *Options {
|
||||
return c.opts
|
||||
}
|
||||
|
||||
// GetTTL returns the TTL for the cache entry
|
||||
func (c *cache) GetTTL(cacheKey string) time.Duration {
|
||||
ttl, err := c.client.TTL(context.Background(), cacheKey).Result()
|
||||
if err != nil {
|
||||
zap.L().Error("error getting TTL for cache key", zap.String("cacheKey", cacheKey), zap.Error(err))
|
||||
}
|
||||
return ttl
|
||||
}
|
||||
|
||||
// GetKeys returns the keys matching the pattern
|
||||
func (c *cache) GetKeys(pattern string) ([]string, error) {
|
||||
return c.client.Keys(context.Background(), pattern).Result()
|
||||
}
|
||||
|
||||
// GetKeysWithTTL returns the keys matching the pattern with their TTL
|
||||
func (c *cache) GetKeysWithTTL(pattern string) (map[string]time.Duration, error) {
|
||||
keys, err := c.GetKeys(pattern)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result := make(map[string]time.Duration)
|
||||
for _, key := range keys {
|
||||
result[key] = c.GetTTL(key)
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
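The provider deleted above maps redis.Nil onto a key-miss status rather than an error. A minimal sketch of the same Connect/Store/Retrieve flow written directly against go-redis v8; the address and keys are placeholders:

package main

import (
	"context"
	"errors"
	"fmt"
	"time"

	"github.com/go-redis/redis/v8"
)

func main() {
	ctx := context.Background()

	// Connect built the client from Options{Host, Port, Password, DB}.
	client := redis.NewClient(&redis.Options{Addr: "localhost:6379", DB: 0})
	defer client.Close()

	// Store was a plain SET with a TTL.
	if err := client.Set(ctx, "query:abc", []byte("payload"), time.Minute).Err(); err != nil {
		fmt.Println("store failed:", err)
		return
	}

	// Retrieve translated redis.Nil into a "key miss" status instead of an error.
	data, err := client.Get(ctx, "query:missing").Bytes()
	switch {
	case errors.Is(err, redis.Nil):
		fmt.Println("key miss")
	case err != nil:
		fmt.Println("retrieve error:", err)
	default:
		fmt.Println("hit:", string(data))
	}
}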
91 pkg/query-service/cache/redis/redis_test.go (vendored)
@@ -1,91 +0,0 @@
|
||||
package redis
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache/status"
|
||||
"github.com/go-redis/redismock/v8"
|
||||
)
|
||||
|
||||
func TestStore(t *testing.T) {
|
||||
db, mock := redismock.NewClientMock()
|
||||
c := WithClient(db)
|
||||
|
||||
mock.ExpectSet("key", []byte("value"), 10*time.Second).RedisNil()
|
||||
_ = c.Store("key", []byte("value"), 10*time.Second)
|
||||
|
||||
if err := mock.ExpectationsWereMet(); err != nil {
|
||||
t.Errorf("there were unfulfilled expectations: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRetrieve(t *testing.T) {
|
||||
db, mock := redismock.NewClientMock()
|
||||
c := WithClient(db)
|
||||
mock.ExpectSet("key", []byte("value"), 10*time.Second).RedisNil()
|
||||
_ = c.Store("key", []byte("value"), 10*time.Second)
|
||||
|
||||
mock.ExpectGet("key").SetVal("value")
|
||||
data, retrieveStatus, err := c.Retrieve("key", false)
|
||||
if err != nil {
|
||||
t.Errorf("unexpected error: %s", err)
|
||||
}
|
||||
|
||||
if retrieveStatus != status.RetrieveStatusHit {
|
||||
t.Errorf("expected status %d, got %d", status.RetrieveStatusHit, retrieveStatus)
|
||||
}
|
||||
|
||||
if string(data) != "value" {
|
||||
t.Errorf("expected value %s, got %s", "value", string(data))
|
||||
}
|
||||
|
||||
if err := mock.ExpectationsWereMet(); err != nil {
|
||||
t.Errorf("there were unfulfilled expectations: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSetTTL(t *testing.T) {
|
||||
db, mock := redismock.NewClientMock()
|
||||
c := WithClient(db)
|
||||
mock.ExpectSet("key", []byte("value"), 10*time.Second).RedisNil()
|
||||
_ = c.Store("key", []byte("value"), 10*time.Second)
|
||||
|
||||
mock.ExpectExpire("key", 4*time.Second).RedisNil()
|
||||
c.SetTTL("key", 4*time.Second)
|
||||
|
||||
if err := mock.ExpectationsWereMet(); err != nil {
|
||||
t.Errorf("there were unfulfilled expectations: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRemove(t *testing.T) {
|
||||
db, mock := redismock.NewClientMock()
|
||||
c := WithClient(db)
|
||||
mock.ExpectSet("key", []byte("value"), 10*time.Second).RedisNil()
|
||||
_ = c.Store("key", []byte("value"), 10*time.Second)
|
||||
|
||||
mock.ExpectDel("key").RedisNil()
|
||||
c.Remove("key")
|
||||
|
||||
if err := mock.ExpectationsWereMet(); err != nil {
|
||||
t.Errorf("there were unfulfilled expectations: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestBulkRemove(t *testing.T) {
|
||||
db, mock := redismock.NewClientMock()
|
||||
c := WithClient(db)
|
||||
mock.ExpectSet("key", []byte("value"), 10*time.Second).RedisNil()
|
||||
_ = c.Store("key", []byte("value"), 10*time.Second)
|
||||
|
||||
mock.ExpectSet("key2", []byte("value2"), 10*time.Second).RedisNil()
|
||||
_ = c.Store("key2", []byte("value2"), 10*time.Second)
|
||||
|
||||
mock.ExpectDel("key", "key2").RedisNil()
|
||||
c.BulkRemove([]string{"key", "key2"})
|
||||
|
||||
if err := mock.ExpectationsWereMet(); err != nil {
|
||||
t.Errorf("there were unfulfilled expectations: %s", err)
|
||||
}
|
||||
}
|
||||
33 pkg/query-service/cache/status/status.go (vendored)
@@ -1,33 +0,0 @@
package status

// RetrieveStatus defines the possible status of a cache lookup
type RetrieveStatus int

const (
	RetrieveStatusHit = RetrieveStatus(iota)
	RetrieveStatusPartialHit
	RetrieveStatusRangeMiss
	RetrieveStatusKeyMiss
	RetrieveStatusRevalidated

	RetrieveStatusError
)

func (s RetrieveStatus) String() string {
	switch s {
	case RetrieveStatusHit:
		return "hit"
	case RetrieveStatusPartialHit:
		return "partial hit"
	case RetrieveStatusRangeMiss:
		return "range miss"
	case RetrieveStatusKeyMiss:
		return "key miss"
	case RetrieveStatusRevalidated:
		return "revalidated"
	case RetrieveStatusError:
		return "error"
	default:
		return "unknown"
	}
}
43 pkg/query-service/cache/status/status_test.go (vendored)
@@ -1,43 +0,0 @@
|
||||
package status
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestRetrieveStatusString(t *testing.T) {
|
||||
tests := []struct {
|
||||
status RetrieveStatus
|
||||
want string
|
||||
}{
|
||||
{
|
||||
status: RetrieveStatusHit,
|
||||
want: "hit",
|
||||
},
|
||||
{
|
||||
status: RetrieveStatusPartialHit,
|
||||
want: "partial hit",
|
||||
},
|
||||
{
|
||||
status: RetrieveStatusRangeMiss,
|
||||
want: "range miss",
|
||||
},
|
||||
{
|
||||
status: RetrieveStatusKeyMiss,
|
||||
want: "key miss",
|
||||
},
|
||||
{
|
||||
status: RetrieveStatusRevalidated,
|
||||
want: "revalidated",
|
||||
},
|
||||
{
|
||||
status: RetrieveStatusError,
|
||||
want: "error",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
if got := tt.status.String(); got != tt.want {
|
||||
t.Errorf("RetrieveStatus.String() = %v, want %v", got, tt.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
2 pkg/query-service/cache/testdata/cache.yaml (vendored)
@@ -1,2 +0,0 @@
name: test
provider: inmemory
@@ -9,8 +9,8 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/querycache"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
|
||||
"github.com/SigNoz/signoz/pkg/types/querybuildertypes"
|
||||
)
|
||||
|
||||
func AdjustedMetricTimeRange(start, end, step int64, mq v3.BuilderQuery) (int64, int64) {
|
||||
@@ -93,7 +93,7 @@ func NormalizeLabelName(name string) string {
|
||||
return normalized
|
||||
}
|
||||
|
||||
func GetSeriesFromCachedData(data []querycache.CachedSeriesData, start, end int64) []*v3.Series {
|
||||
func GetSeriesFromCachedData(data []*querybuildertypes.SeriesData, start, end int64) []*v3.Series {
|
||||
series := make(map[uint64]*v3.Series)
|
||||
|
||||
for _, cachedData := range data {
|
||||
@@ -126,7 +126,7 @@ func GetSeriesFromCachedData(data []querycache.CachedSeriesData, start, end int6
|
||||
}
|
||||
|
||||
// It is different from GetSeriesFromCachedData because it doesn't remove a point if it is >= (start - (start % step*1000))
|
||||
func GetSeriesFromCachedDataV2(data []querycache.CachedSeriesData, start, end, step int64) []*v3.Series {
|
||||
func GetSeriesFromCachedDataV2(data []*querybuildertypes.SeriesData, start, end, step int64) []*v3.Series {
|
||||
series := make(map[uint64]*v3.Series)
|
||||
|
||||
for _, cachedData := range data {
|
||||
|
||||
@@ -4,7 +4,7 @@ import (
|
||||
"testing"
|
||||
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/querycache"
|
||||
"github.com/SigNoz/signoz/pkg/types/querybuildertypes"
|
||||
)
|
||||
|
||||
func TestFilterSeriesPoints(t *testing.T) {
|
||||
@@ -308,7 +308,7 @@ func TestFilterSeriesPoints(t *testing.T) {
|
||||
func TestGetSeriesFromCachedData(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
data []querycache.CachedSeriesData
|
||||
data []*querybuildertypes.SeriesData
|
||||
start int64
|
||||
end int64
|
||||
expectedCount int
|
||||
@@ -316,7 +316,7 @@ func TestGetSeriesFromCachedData(t *testing.T) {
|
||||
}{
|
||||
{
|
||||
name: "Single point outside range",
|
||||
data: []querycache.CachedSeriesData{
|
||||
data: []*querybuildertypes.SeriesData{
|
||||
{
|
||||
Data: []*v3.Series{
|
||||
{
|
||||
@@ -335,7 +335,7 @@ func TestGetSeriesFromCachedData(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "Single point inside range",
|
||||
data: []querycache.CachedSeriesData{
|
||||
data: []*querybuildertypes.SeriesData{
|
||||
{
|
||||
Data: []*v3.Series{
|
||||
{
|
||||
@@ -371,7 +371,7 @@ func TestGetSeriesFromCachedData(t *testing.T) {
|
||||
func TestGetSeriesFromCachedDataV2(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
data []querycache.CachedSeriesData
|
||||
data []*querybuildertypes.SeriesData
|
||||
start int64
|
||||
end int64
|
||||
step int64
|
||||
@@ -380,7 +380,7 @@ func TestGetSeriesFromCachedDataV2(t *testing.T) {
|
||||
}{
|
||||
{
|
||||
name: "Single point outside range",
|
||||
data: []querycache.CachedSeriesData{
|
||||
data: []*querybuildertypes.SeriesData{
|
||||
{
|
||||
Data: []*v3.Series{
|
||||
{
|
||||
@@ -400,7 +400,7 @@ func TestGetSeriesFromCachedDataV2(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "Single point inside range",
|
||||
data: []querycache.CachedSeriesData{
|
||||
data: []*querybuildertypes.SeriesData{
|
||||
{
|
||||
Data: []*v3.Series{
|
||||
{
|
||||
|
||||
@@ -7,7 +7,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model/metrics_explorer"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/querycache"
|
||||
"github.com/SigNoz/signoz/pkg/types/querybuildertypes"
|
||||
"github.com/prometheus/prometheus/promql"
|
||||
"github.com/prometheus/prometheus/util/stats"
|
||||
)
|
||||
@@ -151,9 +151,9 @@ type Querier interface {
|
||||
}
|
||||
|
||||
type QueryCache interface {
|
||||
FindMissingTimeRanges(start, end int64, step int64, cacheKey string) []querycache.MissInterval
|
||||
FindMissingTimeRangesV2(start, end int64, step int64, cacheKey string) []querycache.MissInterval
|
||||
MergeWithCachedSeriesData(cacheKey string, newData []querycache.CachedSeriesData) []querycache.CachedSeriesData
|
||||
StoreSeriesInCache(cacheKey string, series []querycache.CachedSeriesData)
|
||||
MergeWithCachedSeriesDataV2(cacheKey string, series []querycache.CachedSeriesData) []querycache.CachedSeriesData
|
||||
FindMissingTimeRanges(ctx context.Context, start, end int64, step int64, cacheKey string) []*querybuildertypes.MissInterval
|
||||
FindMissingTimeRangesV2(ctx context.Context, start, end int64, step int64, cacheKey string) []*querybuildertypes.MissInterval
|
||||
MergeWithCachedSeriesData(ctx context.Context, cacheKey string, newData []*querybuildertypes.SeriesData) []*querybuildertypes.SeriesData
|
||||
StoreSeriesInCache(ctx context.Context, cacheKey string, series []*querybuildertypes.SeriesData)
|
||||
MergeWithCachedSeriesDataV2(ctx context.Context, cacheKey string, series []*querybuildertypes.SeriesData) []*querybuildertypes.SeriesData
|
||||
}
|
||||
|
||||
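The QueryCache interface above now threads a context.Context through every method and swaps the querycache value slices for querybuildertypes pointer slices. A sketch of what that means for a caller, assuming the interface stays in the query-service interfaces package as before; the package and function names here are illustrative:

package cachemigration

import (
	"context"

	"github.com/SigNoz/signoz/pkg/query-service/interfaces"
	"github.com/SigNoz/signoz/pkg/types/querybuildertypes"
)

// findMisses shows the new call shape; previously this was
// qc.FindMissingTimeRanges(start, end, step, key) returning []querycache.MissInterval.
func findMisses(ctx context.Context, qc interfaces.QueryCache, start, end, step int64, key string) []*querybuildertypes.MissInterval {
	return qc.FindMissingTimeRanges(ctx, start, end, step, key)
}

// merge shows the same change for the merge path; the data is now passed
// around as []*querybuildertypes.SeriesData instead of []querycache.CachedSeriesData.
func merge(ctx context.Context, qc interfaces.QueryCache, key string, fresh []*querybuildertypes.SeriesData) []*querybuildertypes.SeriesData {
	return qc.MergeWithCachedSeriesData(ctx, key, fresh)
}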
@@ -60,6 +60,7 @@ func main() {
|
||||
flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)")
|
||||
// Deprecated
|
||||
flag.StringVar(&ruleRepoURL, "rules.repo-url", constants.AlertHelpPage, "(host address used to build rule link in alert messages)")
|
||||
// Deprecated
|
||||
flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)")
|
||||
flag.StringVar(&fluxInterval, "flux-interval", "5m", "(the interval to exclude data from being cached to avoid incorrect cache for data in motion)")
|
||||
flag.StringVar(&fluxIntervalForTraceDetail, "flux-interval-trace-detail", "2m", "(the interval to exclude data from being cached to avoid incorrect cache for trace data in motion)")
|
||||
@@ -128,7 +129,6 @@ func main() {
|
||||
HTTPHostPort: constants.HTTPHostPort,
|
||||
PreferSpanMetrics: preferSpanMetrics,
|
||||
PrivateHostPort: constants.PrivateHostPort,
|
||||
CacheConfigPath: cacheConfigPath,
|
||||
FluxInterval: fluxInterval,
|
||||
FluxIntervalForTraceDetail: fluxIntervalForTraceDetail,
|
||||
Cluster: cluster,
|
||||
|
||||
@@ -18,8 +18,8 @@ import (
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
|
||||
|
||||
@@ -1,34 +1,46 @@
|
||||
package querycache
|
||||
package querybuildertypes
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"math"
|
||||
"sort"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache"
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
type queryCache struct {
|
||||
cache cache.Cache
|
||||
fluxInterval time.Duration
|
||||
type SeriesData struct {
|
||||
Start int64 `json:"start"`
|
||||
End int64 `json:"end"`
|
||||
Data []*v3.Series `json:"data"`
|
||||
}
|
||||
|
||||
type CachedSeriesData struct {
|
||||
Series []*SeriesData
|
||||
}
|
||||
|
||||
type MissInterval struct {
|
||||
Start, End int64 // in milliseconds
|
||||
}
|
||||
|
||||
type CachedSeriesData struct {
|
||||
Start int64 `json:"start"`
|
||||
End int64 `json:"end"`
|
||||
Data []*v3.Series `json:"data"`
|
||||
func (c *CachedSeriesData) MarshalBinary() (data []byte, err error) {
|
||||
return json.Marshal(c)
|
||||
}
|
||||
func (c *CachedSeriesData) UnmarshalBinary(data []byte) error {
|
||||
return json.Unmarshal(data, c)
|
||||
}
|
||||
|
||||
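CachedSeriesData implementing MarshalBinary and UnmarshalBinary is what lets the new pkg/cache Store and Retrieve calls further down take the struct directly instead of pre-marshalled JSON bytes. A small round-trip sketch, assuming the moved file ends up under pkg/types/querybuildertypes as the other new imports in this diff indicate:

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/types/querybuildertypes"
)

func main() {
	in := &querybuildertypes.CachedSeriesData{
		Series: []*querybuildertypes.SeriesData{{Start: 1000, End: 2000}},
	}

	// The cache backend can call MarshalBinary when persisting the value...
	raw, err := in.MarshalBinary()
	if err != nil {
		panic(err)
	}

	// ...and UnmarshalBinary when filling the destination passed to Retrieve.
	out := new(querybuildertypes.CachedSeriesData)
	if err := out.UnmarshalBinary(raw); err != nil {
		panic(err)
	}

	fmt.Printf("round-tripped %d series entries: %s\n", len(out.Series), raw)
}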
type QueryCacheOption func(q *queryCache)
|
||||
|
||||
type queryCache struct {
|
||||
cache cache.Cache
|
||||
fluxInterval time.Duration
|
||||
}
|
||||
|
||||
func NewQueryCache(opts ...QueryCacheOption) *queryCache {
|
||||
q := &queryCache{}
|
||||
for _, opt := range opts {
|
||||
@@ -51,29 +63,29 @@ func WithFluxInterval(fluxInterval time.Duration) QueryCacheOption {
|
||||
|
||||
// FindMissingTimeRangesV2 is a new correct implementation of FindMissingTimeRanges
|
||||
// It takes care of any timestamps that were not queried due to rounding in the first version.
|
||||
func (q *queryCache) FindMissingTimeRangesV2(start, end int64, step int64, cacheKey string) []MissInterval {
|
||||
func (q *queryCache) FindMissingTimeRangesV2(ctx context.Context, start, end int64, step int64, cacheKey string) []*MissInterval {
|
||||
if q.cache == nil || cacheKey == "" {
|
||||
return []MissInterval{{Start: start, End: end}}
|
||||
return []*MissInterval{{Start: start, End: end}}
|
||||
}
|
||||
|
||||
stepMs := step * 1000
|
||||
|
||||
// when the window is too small to be cached, we return the entire range as a miss
|
||||
if (start + stepMs) > end {
|
||||
return []MissInterval{{Start: start, End: end}}
|
||||
return []*MissInterval{{Start: start, End: end}}
|
||||
}
|
||||
|
||||
cachedSeriesDataList := q.getCachedSeriesData(cacheKey)
|
||||
cachedSeriesDataList := q.getCachedSeriesData(ctx, cacheKey)
|
||||
|
||||
// Sort the cached data by start time
|
||||
sort.Slice(cachedSeriesDataList, func(i, j int) bool {
|
||||
return cachedSeriesDataList[i].Start < cachedSeriesDataList[j].Start
|
||||
})
|
||||
|
||||
zap.L().Info("Number of non-overlapping cached series data", zap.Int("count", len(cachedSeriesDataList)))
|
||||
// TODO[@vikrantgupta25]: add logger here
|
||||
// zap.L().Info("Number of non-overlapping cached series data", zap.Int("count", len(cachedSeriesDataList)))
|
||||
|
||||
// Exclude the flux interval from the cached end time
|
||||
|
||||
// Why do we use `time.Now()` here?
|
||||
// When querying for a range [start, now())
|
||||
// we don't want to use the cached data inside the flux interval period
|
||||
@@ -90,13 +102,13 @@ func (q *queryCache) FindMissingTimeRangesV2(start, end int64, step int64, cache
|
||||
)
|
||||
}
|
||||
|
||||
var missingRanges []MissInterval
|
||||
var missingRanges []*MissInterval
|
||||
currentTime := start
|
||||
|
||||
// check if start is a complete aggregation window if not then add it as a miss
|
||||
if start%stepMs != 0 {
|
||||
nextAggStart := start - (start % stepMs) + stepMs
|
||||
missingRanges = append(missingRanges, MissInterval{Start: start, End: nextAggStart})
|
||||
missingRanges = append(missingRanges, &MissInterval{Start: start, End: nextAggStart})
|
||||
currentTime = nextAggStart
|
||||
}
|
||||
|
||||
@@ -112,7 +124,7 @@ func (q *queryCache) FindMissingTimeRangesV2(start, end int64, step int64, cache
|
||||
|
||||
// Add missing range if there's a gap
|
||||
if currentTime < data.Start {
|
||||
missingRanges = append(missingRanges, MissInterval{Start: currentTime, End: min(data.Start, end)})
|
||||
missingRanges = append(missingRanges, &MissInterval{Start: currentTime, End: min(data.Start, end)})
|
||||
}
|
||||
|
||||
// Update currentTime, but don't go past the end time
|
||||
@@ -127,20 +139,20 @@ func (q *queryCache) FindMissingTimeRangesV2(start, end int64, step int64, cache
|
||||
// The test case "start lies near the start of aggregation interval and end lies near the end of another aggregation interval"
|
||||
// shows this case.
|
||||
if currentTime < end {
|
||||
missingRanges = append(missingRanges, MissInterval{Start: currentTime, End: end})
|
||||
missingRanges = append(missingRanges, &MissInterval{Start: currentTime, End: end})
|
||||
} else if end%stepMs != 0 {
|
||||
// check if end is a complete aggregation window if not then add it as a miss
|
||||
prevAggEnd := end - (end % stepMs)
|
||||
missingRanges = append(missingRanges, MissInterval{Start: prevAggEnd, End: end})
|
||||
missingRanges = append(missingRanges, &MissInterval{Start: prevAggEnd, End: end})
|
||||
}
|
||||
|
||||
// Merge overlapping or adjacent missing ranges
|
||||
if len(missingRanges) <= 1 {
|
||||
return missingRanges
|
||||
}
|
||||
merged := []MissInterval{missingRanges[0]}
|
||||
merged := []*MissInterval{missingRanges[0]}
|
||||
for _, curr := range missingRanges[1:] {
|
||||
last := &merged[len(merged)-1]
|
||||
last := merged[len(merged)-1]
|
||||
if last.End >= curr.Start {
|
||||
last.End = max(last.End, curr.End)
|
||||
} else {
|
||||
@@ -151,19 +163,20 @@ func (q *queryCache) FindMissingTimeRangesV2(start, end int64, step int64, cache
|
||||
return merged
|
||||
}
|
||||
|
||||
func (q *queryCache) FindMissingTimeRanges(start, end, step int64, cacheKey string) []MissInterval {
|
||||
func (q *queryCache) FindMissingTimeRanges(ctx context.Context, start, end, step int64, cacheKey string) []*MissInterval {
|
||||
if q.cache == nil || cacheKey == "" {
|
||||
return []MissInterval{{Start: start, End: end}}
|
||||
return []*MissInterval{{Start: start, End: end}}
|
||||
}
|
||||
|
||||
cachedSeriesDataList := q.getCachedSeriesData(cacheKey)
|
||||
cachedSeriesDataList := q.getCachedSeriesData(ctx, cacheKey)
|
||||
|
||||
// Sort the cached data by start time
|
||||
sort.Slice(cachedSeriesDataList, func(i, j int) bool {
|
||||
return cachedSeriesDataList[i].Start < cachedSeriesDataList[j].Start
|
||||
})
|
||||
|
||||
zap.L().Info("Number of non-overlapping cached series data", zap.Int("count", len(cachedSeriesDataList)))
|
||||
// TODO[@vikrantgupta25]: add logger here
|
||||
// zap.L().Info("Number of non-overlapping cached series data", zap.Int("count", len(cachedSeriesDataList)))
|
||||
|
||||
// Exclude the flux interval from the cached end time
|
||||
|
||||
@@ -187,7 +200,7 @@ func (q *queryCache) FindMissingTimeRanges(start, end, step int64, cacheKey stri
|
||||
)
|
||||
}
|
||||
|
||||
var missingRanges []MissInterval
|
||||
var missingRanges []*MissInterval
|
||||
currentTime := start
|
||||
|
||||
for _, data := range cachedSeriesDataList {
|
||||
@@ -202,7 +215,7 @@ func (q *queryCache) FindMissingTimeRanges(start, end, step int64, cacheKey stri
|
||||
|
||||
// Add missing range if there's a gap
|
||||
if currentTime < data.Start {
|
||||
missingRanges = append(missingRanges, MissInterval{Start: currentTime, End: min(data.Start, end)})
|
||||
missingRanges = append(missingRanges, &MissInterval{Start: currentTime, End: min(data.Start, end)})
|
||||
}
|
||||
|
||||
// Update currentTime, but don't go past the end time
|
||||
@@ -211,19 +224,20 @@ func (q *queryCache) FindMissingTimeRanges(start, end, step int64, cacheKey stri
|
||||
|
||||
// Add final missing range if necessary
|
||||
if currentTime < end {
|
||||
missingRanges = append(missingRanges, MissInterval{Start: currentTime, End: end})
|
||||
missingRanges = append(missingRanges, &MissInterval{Start: currentTime, End: end})
|
||||
}
|
||||
|
||||
return missingRanges
|
||||
}
|
||||
|
||||
func (q *queryCache) getCachedSeriesData(cacheKey string) []*CachedSeriesData {
|
||||
cachedData, _, _ := q.cache.Retrieve(cacheKey, true)
|
||||
var cachedSeriesDataList []*CachedSeriesData
|
||||
if err := json.Unmarshal(cachedData, &cachedSeriesDataList); err != nil {
|
||||
func (q *queryCache) getCachedSeriesData(ctx context.Context, cacheKey string) []*SeriesData {
|
||||
cachedSeriesData := new(CachedSeriesData)
|
||||
_, err := q.cache.Retrieve(ctx, cacheKey, cachedSeriesData, true)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
return cachedSeriesDataList
|
||||
|
||||
return cachedSeriesData.Series
|
||||
}
|
||||
|
||||
func (q *queryCache) mergeSeries(cachedSeries, missedSeries []*v3.Series) []*v3.Series {
|
||||
@@ -263,45 +277,40 @@ func (q *queryCache) mergeSeries(cachedSeries, missedSeries []*v3.Series) []*v3.
|
||||
return mergedSeries
|
||||
}
|
||||
|
||||
func (q *queryCache) storeMergedData(cacheKey string, mergedData []CachedSeriesData) {
|
||||
func (q *queryCache) storeMergedData(ctx context.Context, cacheKey string, mergedData []*SeriesData) {
|
||||
if q.cache == nil {
|
||||
return
|
||||
}
|
||||
mergedDataJSON, err := json.Marshal(mergedData)
|
||||
if err != nil {
|
||||
zap.L().Error("error marshalling merged data", zap.Error(err))
|
||||
return
|
||||
}
|
||||
err = q.cache.Store(cacheKey, mergedDataJSON, 0)
|
||||
cachedSeriesData := CachedSeriesData{Series: mergedData}
|
||||
err := q.cache.Store(ctx, cacheKey, &cachedSeriesData, 0)
|
||||
if err != nil {
|
||||
zap.L().Error("error storing merged data", zap.Error(err))
|
||||
}
|
||||
}
|
||||
|
||||
func (q *queryCache) MergeWithCachedSeriesDataV2(cacheKey string, newData []CachedSeriesData) []CachedSeriesData {
|
||||
func (q *queryCache) MergeWithCachedSeriesDataV2(ctx context.Context, cacheKey string, newData []*SeriesData) []*SeriesData {
|
||||
if q.cache == nil {
|
||||
return newData
|
||||
}
|
||||
|
||||
cachedData, _, _ := q.cache.Retrieve(cacheKey, true)
|
||||
var existingData []CachedSeriesData
|
||||
if err := json.Unmarshal(cachedData, &existingData); err != nil {
|
||||
zap.L().Error("error unmarshalling existing data", zap.Error(err))
|
||||
return newData
|
||||
cachedSeriesData := new(CachedSeriesData)
|
||||
_, err := q.cache.Retrieve(ctx, cacheKey, cachedSeriesData, true)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
allData := append(existingData, newData...)
|
||||
allData := append(cachedSeriesData.Series, newData...)
|
||||
|
||||
sort.Slice(allData, func(i, j int) bool {
|
||||
return allData[i].Start < allData[j].Start
|
||||
})
|
||||
|
||||
var mergedData []CachedSeriesData
|
||||
var current *CachedSeriesData
|
||||
var mergedData []*SeriesData
|
||||
var current *SeriesData
|
||||
|
||||
for _, data := range allData {
|
||||
if current == nil {
|
||||
current = &CachedSeriesData{
|
||||
current = &SeriesData{
|
||||
Start: data.Start,
|
||||
End: data.End,
|
||||
Data: data.Data,
|
||||
@@ -316,9 +325,9 @@ func (q *queryCache) MergeWithCachedSeriesDataV2(cacheKey string, newData []Cach
|
||||
current.Data = q.mergeSeries(current.Data, data.Data)
|
||||
} else {
|
||||
// No overlap, add current to mergedData
|
||||
mergedData = append(mergedData, *current)
|
||||
mergedData = append(mergedData, current)
|
||||
// Start new current
|
||||
current = &CachedSeriesData{
|
||||
current = &SeriesData{
|
||||
Start: data.Start,
|
||||
End: data.End,
|
||||
Data: data.Data,
|
||||
@@ -328,19 +337,19 @@ func (q *queryCache) MergeWithCachedSeriesDataV2(cacheKey string, newData []Cach
|
||||
|
||||
// After the loop, add the last current
|
||||
if current != nil {
|
||||
mergedData = append(mergedData, *current)
|
||||
mergedData = append(mergedData, current)
|
||||
}
|
||||
|
||||
return mergedData
|
||||
}
|
||||
|
||||
func (q *queryCache) MergeWithCachedSeriesData(cacheKey string, newData []CachedSeriesData) []CachedSeriesData {
|
||||
func (q *queryCache) MergeWithCachedSeriesData(ctx context.Context, cacheKey string, newData []*SeriesData) []*SeriesData {
|
||||
|
||||
mergedData := q.MergeWithCachedSeriesDataV2(cacheKey, newData)
|
||||
q.storeMergedData(cacheKey, mergedData)
|
||||
mergedData := q.MergeWithCachedSeriesDataV2(ctx, cacheKey, newData)
|
||||
q.storeMergedData(ctx, cacheKey, mergedData)
|
||||
return mergedData
|
||||
}
|
||||
|
||||
func (q *queryCache) StoreSeriesInCache(cacheKey string, series []CachedSeriesData) {
|
||||
q.storeMergedData(cacheKey, series)
|
||||
func (q *queryCache) StoreSeriesInCache(ctx context.Context, cacheKey string, series []*SeriesData) {
|
||||
q.storeMergedData(ctx, cacheKey, series)
|
||||
}
|
||||
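Putting the pieces together, the rewritten query cache is wired up the same way the updated test below does it: a memory-backed pkg/cache instance handed to NewQueryCache. A condensed sketch of that flow using only constructors and methods that appear in this diff; the querybuildertypes import path is assumed from the other imports in this change set, and the key is a placeholder:

package main

import (
	"context"
	"fmt"
	"time"

	"github.com/SigNoz/signoz/pkg/cache"
	"github.com/SigNoz/signoz/pkg/cache/memorycache"
	"github.com/SigNoz/signoz/pkg/factory/factorytest"
	"github.com/SigNoz/signoz/pkg/types/querybuildertypes"
)

func main() {
	ctx := context.Background()

	// Memory-backed cache, constructed exactly as in TestFindMissingTimeRanges below.
	c, err := memorycache.New(ctx, factorytest.NewSettings(), cache.Config{
		Provider: "memory",
		Memory:   cache.Memory{TTL: 5 * time.Minute, CleanupInterval: 10 * time.Minute},
	})
	if err != nil {
		panic(err)
	}

	qc := querybuildertypes.NewQueryCache(
		querybuildertypes.WithCache(c),
		querybuildertypes.WithFluxInterval(0),
	)

	// Seed one cached window, then ask which sub-ranges of 1000-5000 still need
	// to be queried; with this seed the misses are 1000-2000 and 3000-5000.
	qc.StoreSeriesInCache(ctx, "exampleKey", []*querybuildertypes.SeriesData{{Start: 2000, End: 3000}})

	for _, m := range qc.FindMissingTimeRanges(ctx, 1000, 5000, 60, "exampleKey") {
		fmt.Printf("miss: %d - %d\n", m.Start, m.End)
	}
}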
@@ -1,25 +1,29 @@
|
||||
package querycache_test
|
||||
package querybuildertypes
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache/inmemory"
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/cache/memorycache"
|
||||
"github.com/SigNoz/signoz/pkg/factory/factorytest"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/querycache"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestFindMissingTimeRanges(t *testing.T) {
|
||||
// Initialize the mock cache
|
||||
mockCache := inmemory.New(&inmemory.Options{TTL: 5 * time.Minute, CleanupInterval: 10 * time.Minute})
|
||||
|
||||
// Create a queryCache instance with the mock cache and a fluxInterval
|
||||
q := querycache.NewQueryCache(
|
||||
querycache.WithCache(mockCache),
|
||||
querycache.WithFluxInterval(0), // Set to zero for testing purposes
|
||||
)
|
||||
opts := cache.Memory{
|
||||
TTL: 5 * time.Minute,
|
||||
CleanupInterval: 10 * time.Minute,
|
||||
}
|
||||
c, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
|
||||
if err != nil {
|
||||
t.Errorf("error initialising cache: %v", err)
|
||||
}
|
||||
qc := NewQueryCache(WithCache(c), WithFluxInterval(0))
|
||||
|
||||
// Define the test cases
|
||||
testCases := []struct {
|
||||
@@ -28,8 +32,8 @@ func TestFindMissingTimeRanges(t *testing.T) {
|
||||
requestedEnd int64 // in milliseconds
|
||||
step int64 // in seconds
|
||||
cacheKey string
|
||||
cachedData []querycache.CachedSeriesData
|
||||
expectedMiss []querycache.MissInterval
|
||||
cachedData []*SeriesData
|
||||
expectedMiss []*MissInterval
|
||||
}{
|
||||
{
|
||||
name: "Cached time range is a subset of the requested time range",
|
||||
@@ -37,14 +41,14 @@ func TestFindMissingTimeRanges(t *testing.T) {
|
||||
requestedEnd: 5000,
|
||||
step: 60,
|
||||
cacheKey: "testKey1",
|
||||
cachedData: []querycache.CachedSeriesData{
|
||||
cachedData: []*SeriesData{
|
||||
{
|
||||
Start: 2000,
|
||||
End: 3000,
|
||||
Data: []*v3.Series{}, // Data can be empty for this test
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*MissInterval{
|
||||
{Start: 1000, End: 2000},
|
||||
{Start: 3000, End: 5000},
|
||||
},
|
||||
@@ -55,7 +59,7 @@ func TestFindMissingTimeRanges(t *testing.T) {
|
||||
requestedEnd: 3000,
|
||||
step: 60,
|
||||
cacheKey: "testKey2",
|
||||
cachedData: []querycache.CachedSeriesData{
|
||||
cachedData: []*SeriesData{
|
||||
{
|
||||
Start: 1000,
|
||||
End: 4000,
|
||||
@@ -70,14 +74,14 @@ func TestFindMissingTimeRanges(t *testing.T) {
|
||||
requestedEnd: 4000,
|
||||
step: 60,
|
||||
cacheKey: "testKey3",
|
||||
cachedData: []querycache.CachedSeriesData{
|
||||
cachedData: []*SeriesData{
|
||||
{
|
||||
Start: 1000,
|
||||
End: 2500,
|
||||
Data: []*v3.Series{},
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*MissInterval{
|
||||
{Start: 2500, End: 4000},
|
||||
},
|
||||
},
|
||||
@@ -87,14 +91,14 @@ func TestFindMissingTimeRanges(t *testing.T) {
|
||||
requestedEnd: 4000,
|
||||
step: 60,
|
||||
cacheKey: "testKey4",
|
||||
cachedData: []querycache.CachedSeriesData{
|
||||
cachedData: []*SeriesData{
|
||||
{
|
||||
Start: 3500,
|
||||
End: 5000,
|
||||
Data: []*v3.Series{},
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*MissInterval{
|
||||
{Start: 2000, End: 3500},
|
||||
},
|
||||
},
|
||||
@@ -104,14 +108,14 @@ func TestFindMissingTimeRanges(t *testing.T) {
|
||||
requestedEnd: 4000,
|
||||
step: 60,
|
||||
cacheKey: "testKey5",
|
||||
cachedData: []querycache.CachedSeriesData{
|
||||
cachedData: []*SeriesData{
|
||||
{
|
||||
Start: 5000,
|
||||
End: 6000,
|
||||
Data: []*v3.Series{},
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*MissInterval{
|
||||
{Start: 2000, End: 4000},
|
||||
},
|
||||
},
|
||||
@@ -122,12 +126,12 @@ func TestFindMissingTimeRanges(t *testing.T) {
|
||||
requestedEnd: 5000,
|
||||
step: 60,
|
||||
cacheKey: "testKey6",
|
||||
cachedData: []querycache.CachedSeriesData{
|
||||
cachedData: []*SeriesData{
|
||||
{Start: 1100, End: 1200, Data: []*v3.Series{}},
|
||||
{Start: 1300, End: 1400, Data: []*v3.Series{}},
|
||||
{Start: 1500, End: 1600, Data: []*v3.Series{}},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*MissInterval{
|
||||
{Start: 1000, End: 1100},
|
||||
{Start: 1200, End: 1300},
|
||||
{Start: 1400, End: 1500},
|
||||
@@ -140,13 +144,13 @@ func TestFindMissingTimeRanges(t *testing.T) {
|
||||
requestedEnd: 2000,
|
||||
step: 60,
|
||||
cacheKey: "testKey7",
|
||||
cachedData: []querycache.CachedSeriesData{
|
||||
cachedData: []*SeriesData{
|
||||
{Start: 1000, End: 1100, Data: []*v3.Series{}},
|
||||
{Start: 1200, End: 1300, Data: []*v3.Series{}},
|
||||
{Start: 1400, End: 1500, Data: []*v3.Series{}},
|
||||
{Start: 1600, End: 1700, Data: []*v3.Series{}},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*MissInterval{
|
||||
{Start: 1100, End: 1200},
|
||||
{Start: 1300, End: 1400},
|
||||
{Start: 1500, End: 1600},
|
||||
@@ -159,11 +163,11 @@ func TestFindMissingTimeRanges(t *testing.T) {
|
||||
requestedEnd: 3000,
|
||||
step: 60,
|
||||
cacheKey: "testKey8",
|
||||
cachedData: []querycache.CachedSeriesData{
|
||||
cachedData: []*SeriesData{
|
||||
{Start: 1000, End: 1500, Data: []*v3.Series{}},
|
||||
{Start: 3500, End: 4000, Data: []*v3.Series{}},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*MissInterval{
|
||||
{Start: 2000, End: 3000},
|
||||
},
|
||||
},
|
||||
@@ -174,7 +178,7 @@ func TestFindMissingTimeRanges(t *testing.T) {
|
||||
step: 60,
|
||||
cacheKey: "testKey10",
|
||||
cachedData: nil,
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*MissInterval{
|
||||
{Start: 1000, End: 2000},
|
||||
},
|
||||
},
|
||||
@@ -184,13 +188,13 @@ func TestFindMissingTimeRanges(t *testing.T) {
|
||||
requestedEnd: 5000,
|
||||
step: 60,
|
||||
cacheKey: "testKey11",
|
||||
cachedData: []querycache.CachedSeriesData{
|
||||
cachedData: []*SeriesData{
|
||||
{Start: 1000, End: 2000, Data: []*v3.Series{}},
|
||||
{Start: 1500, End: 2500, Data: []*v3.Series{}}, // Overlaps with previous
|
||||
{Start: 3000, End: 3500, Data: []*v3.Series{}},
|
||||
{Start: 4000, End: 4500, Data: []*v3.Series{}},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*MissInterval{
|
||||
{Start: 2500, End: 3000},
|
||||
{Start: 3500, End: 4000},
|
||||
{Start: 4500, End: 5000},
|
||||
@@ -202,11 +206,11 @@ func TestFindMissingTimeRanges(t *testing.T) {
|
||||
requestedEnd: 5000,
|
||||
step: 60,
|
||||
cacheKey: "testKey12",
|
||||
cachedData: []querycache.CachedSeriesData{
|
||||
cachedData: []*SeriesData{
|
||||
{Start: 1000, End: 1500, Data: []*v3.Series{}},
|
||||
{Start: 4500, End: 5000, Data: []*v3.Series{}},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*MissInterval{
|
||||
{Start: 1500, End: 4500},
|
||||
},
|
||||
},
|
||||
@@ -217,14 +221,13 @@ func TestFindMissingTimeRanges(t *testing.T) {
|
||||
|
||||
// Store the cached data in the mock cache
|
||||
if len(tc.cachedData) > 0 {
|
||||
cachedDataJSON, err := json.Marshal(tc.cachedData)
|
||||
assert.NoError(t, err)
|
||||
err = mockCache.Store(tc.cacheKey, cachedDataJSON, 0)
|
||||
data := CachedSeriesData{Series: tc.cachedData}
|
||||
err = c.Store(context.Background(), tc.cacheKey, &data, 0)
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
|
||||
// Call FindMissingTimeRanges
|
||||
missingRanges := q.FindMissingTimeRanges(tc.requestedStart, tc.requestedEnd, tc.step, tc.cacheKey)
|
||||
missingRanges := qc.FindMissingTimeRanges(context.Background(), tc.requestedStart, tc.requestedEnd, tc.step, tc.cacheKey)
|
||||
|
||||
// Verify the missing ranges
|
||||
assert.Equal(t, tc.expectedMiss, missingRanges)
|
||||
@@ -234,13 +237,15 @@ func TestFindMissingTimeRanges(t *testing.T) {
|
||||
|
||||
func TestFindMissingTimeRangesV2(t *testing.T) {
|
||||
// Initialize the mock cache
|
||||
mockCache := inmemory.New(&inmemory.Options{TTL: 5 * time.Minute, CleanupInterval: 10 * time.Minute})
|
||||
|
||||
// Create a queryCache instance with the mock cache and a fluxInterval
|
||||
q := querycache.NewQueryCache(
|
||||
querycache.WithCache(mockCache),
|
||||
querycache.WithFluxInterval(0), // Set to zero for testing purposes
|
||||
)
|
||||
opts := cache.Memory{
|
||||
TTL: 5 * time.Minute,
|
||||
CleanupInterval: 10 * time.Minute,
|
||||
}
|
||||
c, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
|
||||
if err != nil {
|
||||
t.Errorf("error initialising cache: %v", err)
|
||||
}
|
||||
qc := NewQueryCache(WithCache(c), WithFluxInterval(0))
|
||||
|
||||
// Define the test cases
|
||||
testCases := []struct {
|
||||
@@ -249,8 +254,8 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
|
||||
requestedEnd int64 // in milliseconds
|
||||
step int64 // in seconds
|
||||
cacheKey string
|
||||
cachedData []querycache.CachedSeriesData
|
||||
expectedMiss []querycache.MissInterval
|
||||
cachedData []*SeriesData
|
||||
expectedMiss []*MissInterval
|
||||
}{
|
||||
{
|
||||
name: "Cached time range is a subset of the requested time range",
|
||||
@@ -258,14 +263,14 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
|
||||
requestedEnd: 1738836000000, // 06 Feb 2025 10:00:00
|
||||
step: 60,
|
||||
cacheKey: "testKey1",
|
||||
cachedData: []querycache.CachedSeriesData{
|
||||
cachedData: []*SeriesData{
|
||||
{
|
||||
Start: 1738576800000, // 03 Feb 2025 10:00:00
|
||||
End: 1738749600000, // 05 Feb 2025 10:00:00
|
||||
Data: []*v3.Series{}, // Data can be empty for this test
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*MissInterval{
|
||||
{Start: 1738404000000, End: 1738576800000}, // 01 Feb 2025 10:00:00 - 03 Feb 2025 10:00:00
|
||||
{Start: 1738749600000, End: 1738836000000}, // 05 Feb 2025 10:00:00 - 06 Feb 2025 10:00:00
|
||||
},
|
||||
@@ -276,7 +281,7 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
|
||||
requestedEnd: 1738749600000, // 05 Feb 2025 10:00:00
|
||||
step: 60,
|
||||
cacheKey: "testKey2",
|
||||
cachedData: []querycache.CachedSeriesData{
|
||||
cachedData: []*SeriesData{
|
||||
{
|
||||
Start: 1738404000000, // 01 Feb 2025 10:00:00
|
||||
End: 1738836000000, // 06 Feb 2025 10:00:00
|
||||
@@ -291,14 +296,14 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
|
||||
requestedEnd: 1738836000000, // 06 Feb 2025 10:00:00
|
||||
step: 60,
|
||||
cacheKey: "testKey3",
|
||||
cachedData: []querycache.CachedSeriesData{
|
||||
cachedData: []*SeriesData{
|
||||
{
|
||||
Start: 1738404000000, // 01 Feb 2025 10:00:00
|
||||
End: 1738663200000, // 04 Feb 2025 10:00:00
|
||||
Data: []*v3.Series{},
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*MissInterval{
|
||||
{Start: 1738663200000, End: 1738836000000}, // 04 Feb 2025 10:00:00 - 06 Feb 2025 10:00:00
|
||||
},
|
||||
},
|
||||
@@ -308,14 +313,14 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
|
||||
requestedEnd: 1738576800000, // 03 Feb 2025 10:00:00
|
||||
step: 60,
|
||||
cacheKey: "testKey4",
|
||||
cachedData: []querycache.CachedSeriesData{
|
||||
cachedData: []*SeriesData{
|
||||
{
|
||||
Start: 1738490400000, // 02 Feb 2025 10:00:00
|
||||
End: 1738663200000, // 04 Feb 2025 10:00:00
|
||||
Data: []*v3.Series{},
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*MissInterval{
|
||||
{Start: 1738404000000, End: 1738490400000}, // 01 Feb 2025 10:00:00 - 02 Feb 2025 10:00:00
|
||||
},
|
||||
},
|
||||
@@ -325,14 +330,14 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
|
||||
requestedEnd: 1738576800000, // 03 Feb 2025 10:00:00
|
||||
step: 60,
|
||||
cacheKey: "testKey5",
|
||||
cachedData: []querycache.CachedSeriesData{
|
||||
cachedData: []*SeriesData{
|
||||
{
|
||||
Start: 1738836000000, // 06 Feb 2025 10:00:00
|
||||
End: 1739008800000, // 08 Feb 2025 10:00:00
|
||||
Data: []*v3.Series{},
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*MissInterval{
|
||||
{Start: 1738404000000, End: 1738576800000}, // 01 Feb 2025 10:00:00 - 03 Feb 2025 10:00:00
|
||||
},
|
||||
},
|
||||
@@ -343,7 +348,7 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
|
||||
requestedEnd: 1738836000000, // 06 Feb 2025 10:00:00
|
||||
step: 60,
|
||||
cacheKey: "testKey6",
|
||||
cachedData: []querycache.CachedSeriesData{
|
||||
cachedData: []*SeriesData{
|
||||
{
|
||||
Start: 1738490400000, // 02 Feb 2025 10:00:00
|
||||
End: 1738576800000, // 03 Feb 2025 10:00:00
|
||||
@@ -360,7 +365,7 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
|
||||
Data: []*v3.Series{},
|
||||
},
|
||||
},
|
||||
expectedMiss: []querycache.MissInterval{
|
||||
expectedMiss: []*MissInterval{
|
||||
{Start: 1738404000000, End: 1738490400000}, // 01 Feb 2025 10:00:00 - 02 Feb 2025 10:00:00
|
||||
{Start: 1738576800000, End: 1738663200000}, // 03 Feb 2025 10:00:00 - 04 Feb 2025 10:00:00
{Start: 1738749600000, End: 1738836000000}, // 05 Feb 2025 10:00:00 - 06 Feb 2025 10:00:00
@@ -372,13 +377,13 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
requestedEnd: 1738490400000, // 02 Feb 2025 10:00:00
step: 60,
cacheKey: "testKey7",
cachedData: []querycache.CachedSeriesData{
cachedData: []*SeriesData{
{Start: 1738404000000, End: 1738418400000, Data: []*v3.Series{}}, // 01 Feb 2025 10:00:00 - 14:00:00
{Start: 1738425600000, End: 1738432800000, Data: []*v3.Series{}}, // 01 Feb 2025 16:00:00 - 18:00:00
{Start: 1738440000000, End: 1738447200000, Data: []*v3.Series{}}, // 01 Feb 2025 20:00:00 - 22:00:00
{Start: 1738454400000, End: 1738461600000, Data: []*v3.Series{}}, // 02 Feb 2025 00:00:00 - 02:00:00
},
expectedMiss: []querycache.MissInterval{
expectedMiss: []*MissInterval{
// {Start: 1738404000000, End: 1738404060000}, // 01 Feb 2025 10:00:00 - 10:01:00
{Start: 1738418400000, End: 1738425600000}, // 01 Feb 2025 14:00:00 - 16:00:00
{Start: 1738432800000, End: 1738440000000}, // 01 Feb 2025 18:00:00 - 20:00:00
@@ -392,11 +397,11 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
requestedEnd: 1738576800000, // 03 Feb 2025 10:00:00
step: 60,
cacheKey: "testKey8",
cachedData: []querycache.CachedSeriesData{
cachedData: []*SeriesData{
{Start: 1738404000000, End: 1738447200000, Data: []*v3.Series{}}, // 01 Feb 2025 10:00:00 - 22:00:00
{Start: 1738620000000, End: 1738663200000, Data: []*v3.Series{}}, // 03 Feb 2025 22:00:00 - 04 Feb 2025 10:00:00
},
expectedMiss: []querycache.MissInterval{
expectedMiss: []*MissInterval{
{Start: 1738490400000, End: 1738576800000}, // 02 Feb 2025 10:00:00 - 03 Feb 2025 10:00:00
},
},
@@ -407,7 +412,7 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
step: 60,
cacheKey: "testKey10",
cachedData: nil,
expectedMiss: []querycache.MissInterval{
expectedMiss: []*MissInterval{
{Start: 1738404000000, End: 1738490400000}, // 01 Feb 2025 10:00:00 - 02 Feb 2025 10:00:00
},
},
@@ -417,13 +422,13 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
requestedEnd: 1738407600000, // 01 Feb 2025 11:00:00
step: 60,
cacheKey: "testKey11",
cachedData: []querycache.CachedSeriesData{
cachedData: []*SeriesData{
{Start: 1738404000000, End: 1738405200000, Data: []*v3.Series{}}, // 01 Feb 2025 10:00:00 - 10:20:00
{Start: 1738404600000, End: 1738405200000, Data: []*v3.Series{}}, // 01 Feb 2025 10:10:00 - 10:20:00
{Start: 1738406100000, End: 1738406700000, Data: []*v3.Series{}}, // 01 Feb 2025 10:35:00 - 10:45:00
{Start: 1738407000000, End: 1738407300000, Data: []*v3.Series{}}, // 01 Feb 2025 10:50:00 - 10:55:00
},
expectedMiss: []querycache.MissInterval{
expectedMiss: []*MissInterval{
{Start: 1738405200000, End: 1738406100000}, // 01 Feb 2025 10:20:00 - 10:35:00
{Start: 1738406700000, End: 1738407000000}, // 01 Feb 2025 10:45:00 - 10:50:00
{Start: 1738407300000, End: 1738407600000}, // 01 Feb 2025 10:55:00 - 11:00:00
@@ -435,11 +440,11 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
requestedEnd: 1738407600000, // 01 Feb 2025 11:00:00
step: 60,
cacheKey: "testKey12",
cachedData: []querycache.CachedSeriesData{
cachedData: []*SeriesData{
{Start: 1738404000000, End: 1738405200000, Data: []*v3.Series{}}, // 01 Feb 2025 10:00:00 - 10:20:00
{Start: 1738406400000, End: 1738407600000, Data: []*v3.Series{}}, // 01 Feb 2025 10:40:00 - 11:00:00
},
expectedMiss: []querycache.MissInterval{
expectedMiss: []*MissInterval{
{Start: 1738405200000, End: 1738406400000}, // 01 Feb 2025 10:20:00 - 10:40:00
},
},
@@ -449,10 +454,10 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
requestedEnd: 1738576800001,
step: 60,
cacheKey: "testKey13",
cachedData: []querycache.CachedSeriesData{
cachedData: []*SeriesData{
{Start: 1738576800000, End: 1738576860000, Data: []*v3.Series{}},
},
expectedMiss: []querycache.MissInterval{{Start: 1738576800000, End: 1738576800001}},
expectedMiss: []*MissInterval{{Start: 1738576800000, End: 1738576800001}},
},
{
name: "requested data is exactly one step or aggregation window",
@@ -460,7 +465,7 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
requestedEnd: 1738576860000,
step: 60,
cacheKey: "testKey13",
cachedData: []querycache.CachedSeriesData{
cachedData: []*SeriesData{
{Start: 1738576800000, End: 1738576860000, Data: []*v3.Series{}},
},
expectedMiss: nil,
@@ -471,14 +476,14 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
requestedEnd: 1738749600000, // 05 Feb 2025 10:00:00
step: 86400, // 24 hours
cacheKey: "testKey13",
cachedData: []querycache.CachedSeriesData{
cachedData: []*SeriesData{
{
Start: 1738540800000, // 03 Feb 2025 00:00:00
End: 1738713600000, // 05 Feb 2025 00:00:00
Data: []*v3.Series{},
},
},
expectedMiss: []querycache.MissInterval{
expectedMiss: []*MissInterval{
{Start: 1738576800000, End: 1738627200000}, // 03 Feb 2025 10:00:00 - 04 Feb 2025 00:00:00
{Start: 1738713600000, End: 1738749600000}, // 05 Feb 2025 00:00:00 - 05 Feb 2025 10:00:00
},
@@ -489,14 +494,14 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
requestedEnd: 1738749600000, // 05 Feb 2025 10:00:00
step: 86400, // 24 hours
cacheKey: "testKey13",
cachedData: []querycache.CachedSeriesData{
cachedData: []*SeriesData{
{
Start: 1738540800000, // 03 Feb 2025 00:00:00
End: 1738713600000, // 05 Feb 2025 00:00:00
Data: []*v3.Series{},
},
},
expectedMiss: []querycache.MissInterval{
expectedMiss: []*MissInterval{
{Start: 1738713600000, End: 1738749600000}, // 05 Feb 2025 00:00:00 - 05 Feb 2025 10:00:00
},
},
@@ -506,14 +511,14 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
requestedEnd: 1738713000000, // 04 Feb 2025 11:50:00
step: 86400, // 24 hours
cacheKey: "testKey13",
cachedData: []querycache.CachedSeriesData{
cachedData: []*SeriesData{
{
Start: 1738540800000, // 03 Feb 2025 00:00:00
End: 1738713600000, // 05 Feb 2025 00:00:00
Data: []*v3.Series{},
},
},
expectedMiss: []querycache.MissInterval{
expectedMiss: []*MissInterval{
{Start: 1738541400000, End: 1738713000000}, // 03 Feb 2025 00:10:00 - 04 Feb 2025 11:50:00
},
},
@@ -523,14 +528,14 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
requestedEnd: 1738713000000, // 04 Feb 2025 11:50:00
step: 86400, // 24 hours
cacheKey: "testKey13",
cachedData: []querycache.CachedSeriesData{
cachedData: []*SeriesData{
{
Start: 1738540800000, // 03 Feb 2025 00:00:00
End: 1738713600000, // 05 Feb 2025 00:00:00
Data: []*v3.Series{},
},
},
expectedMiss: []querycache.MissInterval{
expectedMiss: []*MissInterval{
{Start: 1738411859000, End: 1738540800000}, // 01 Feb 2025 00:10:00 - 03 Feb 2025 00:00:00
{Start: 1738627200000, End: 1738713000000}, // 04 Feb 2025 00:00:00 - 04 Feb 2025 11:50:00
},
@@ -541,14 +546,14 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
requestedEnd: 1738713600000, // 05 Feb 2025 00:00:00
step: 86400, // 24 hours
cacheKey: "testKey13",
cachedData: []querycache.CachedSeriesData{
cachedData: []*SeriesData{
{
Start: 1738540800000, // 03 Feb 2025 00:00:00
End: 1738713600000, // 05 Feb 2025 00:00:00
Data: []*v3.Series{},
},
},
expectedMiss: []querycache.MissInterval{
expectedMiss: []*MissInterval{
{Start: 1738498255000, End: 1738540800000}, // 03 Feb 2025 00:10:00 - 03 Feb 2025 00:00:00
},
},
@@ -559,14 +564,14 @@ func TestFindMissingTimeRangesV2(t *testing.T) {

// Store the cached data in the mock cache
if len(tc.cachedData) > 0 {
cachedDataJSON, err := json.Marshal(tc.cachedData)
data := CachedSeriesData{Series: tc.cachedData}
err = c.Store(context.Background(), tc.cacheKey, &data, 0)
assert.NoError(t, err)
err = mockCache.Store(tc.cacheKey, cachedDataJSON, 0)
assert.NoError(t, err)
}

// Call FindMissingTimeRanges
missingRanges := q.FindMissingTimeRangesV2(tc.requestedStart, tc.requestedEnd, tc.step, tc.cacheKey)
missingRanges := qc.FindMissingTimeRangesV2(context.Background(), tc.requestedStart, tc.requestedEnd, tc.step, tc.cacheKey)

// Verify the missing ranges
assert.Equal(t, tc.expectedMiss, missingRanges)
@@ -576,19 +581,21 @@ func TestFindMissingTimeRangesV2(t *testing.T) {

func TestMergeWithCachedSeriesData(t *testing.T) {
// Initialize the mock cache
mockCache := inmemory.New(&inmemory.Options{TTL: 5 * time.Minute, CleanupInterval: 10 * time.Minute})

// Create a queryCache instance with the mock cache and a fluxInterval
q := querycache.NewQueryCache(
querycache.WithCache(mockCache),
querycache.WithFluxInterval(0), // Set to zero for testing purposes
)
opts := cache.Memory{
TTL: 5 * time.Minute,
CleanupInterval: 10 * time.Minute,
}
c, err := memorycache.New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
if err != nil {
t.Errorf("error initialising cache: %v", err)
}
qc := NewQueryCache(WithCache(c), WithFluxInterval(0))

// Define test data
cacheKey := "mergeTestKey"

// Existing cached data
existingData := []querycache.CachedSeriesData{
existingData := []*SeriesData{
{
Start: 1000,
End: 2000,
@@ -604,7 +611,7 @@ func TestMergeWithCachedSeriesData(t *testing.T) {
}

// New data to merge
newData := []querycache.CachedSeriesData{
newData := []*SeriesData{
{
Start: 1500,
End: 2500,
@@ -626,7 +633,7 @@ func TestMergeWithCachedSeriesData(t *testing.T) {
}

// Expected merged data
expectedMergedData := []querycache.CachedSeriesData{
expectedMergedData := []*SeriesData{
{
Start: 1000,
End: 2500,
@@ -649,13 +656,12 @@ func TestMergeWithCachedSeriesData(t *testing.T) {
}

// Store existing data in cache
cachedDataJSON, err := json.Marshal(existingData)
assert.NoError(t, err)
err = mockCache.Store(cacheKey, cachedDataJSON, 0)
data := CachedSeriesData{Series: existingData}
err = c.Store(context.Background(), cacheKey, &data, 0)
assert.NoError(t, err)

// Call MergeWithCachedSeriesData
mergedData := q.MergeWithCachedSeriesData(cacheKey, newData)
mergedData := qc.MergeWithCachedSeriesData(context.Background(), cacheKey, newData)

// Verify the merged data
assert.Equal(t, len(expectedMergedData), len(mergedData))