mirror of
https://github.com/SigNoz/signoz.git
synced 2026-02-06 09:42:18 +00:00
Compare commits
63 Commits
testingtf
...
fix/bun-sl
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7e290772f8 | ||
|
|
36d06c2127 | ||
|
|
705e75a0d7 | ||
|
|
16e3c185e9 | ||
|
|
8d6671e362 | ||
|
|
5b237ee628 | ||
|
|
dae2527521 | ||
|
|
7a65bbdd9d | ||
|
|
cb08ce5e5d | ||
|
|
3fbc3dec48 | ||
|
|
8478a07d7d | ||
|
|
1fbdcc7c1e | ||
|
|
768a95269d | ||
|
|
daee2135d0 | ||
|
|
0cbca568f5 | ||
|
|
9f24cf1587 | ||
|
|
b1bc2f81ba | ||
|
|
bfe122fc51 | ||
|
|
b0f347458c | ||
|
|
5b2f897a00 | ||
|
|
7a0f584e18 | ||
|
|
ecfab8b291 | ||
|
|
a90926014c | ||
|
|
39786931de | ||
|
|
73f57d8bee | ||
|
|
ab17bf3558 | ||
|
|
eb5a1b76b8 | ||
|
|
130ff925bd | ||
|
|
75d86cea60 | ||
|
|
cf451d335c | ||
|
|
e47c7cc17b | ||
|
|
629c54d3f9 | ||
|
|
ed3026eeb5 | ||
|
|
ccf26883c4 | ||
|
|
958924befe | ||
|
|
b70c570cdc | ||
|
|
42a026469b | ||
|
|
6de0908a62 | ||
|
|
fd21a4955e | ||
|
|
3dce13d29f | ||
|
|
2ce4b60c55 | ||
|
|
c9888804cd | ||
|
|
413b0d9fae | ||
|
|
b24095236f | ||
|
|
21d239ce68 | ||
|
|
d6e4e3c5ed | ||
|
|
552b103e8b | ||
|
|
1123a9a93d | ||
|
|
8b30e3cc5c | ||
|
|
b86e65d2ca | ||
|
|
d5e2841083 | ||
|
|
7dad5dcd17 | ||
|
|
ac0b640146 | ||
|
|
e125d146b5 | ||
|
|
a41ffceca4 | ||
|
|
7edb047c0c | ||
|
|
6504f2565b | ||
|
|
6b418a125b | ||
|
|
36827a1667 | ||
|
|
1118c56356 | ||
|
|
bd071e3e60 | ||
|
|
36f3a2e26d | ||
|
|
fee7e96176 |
@@ -1,5 +1,4 @@
|
||||
services:
|
||||
|
||||
clickhouse:
|
||||
image: clickhouse/clickhouse-server:24.1.2-alpine
|
||||
container_name: clickhouse
|
||||
@@ -24,7 +23,6 @@ services:
|
||||
retries: 3
|
||||
depends_on:
|
||||
- zookeeper
|
||||
|
||||
zookeeper:
|
||||
image: bitnami/zookeeper:3.7.1
|
||||
container_name: zookeeper
|
||||
@@ -41,9 +39,8 @@ services:
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
|
||||
schema-migrator-sync:
|
||||
image: signoz/signoz-schema-migrator:0.111.29
|
||||
image: signoz/signoz-schema-migrator:v0.111.40
|
||||
container_name: schema-migrator-sync
|
||||
command:
|
||||
- sync
|
||||
@@ -55,9 +52,8 @@ services:
|
||||
clickhouse:
|
||||
condition: service_healthy
|
||||
restart: on-failure
|
||||
|
||||
schema-migrator-async:
|
||||
image: signoz/signoz-schema-migrator:0.111.29
|
||||
image: signoz/signoz-schema-migrator:v0.111.40
|
||||
container_name: schema-migrator-async
|
||||
command:
|
||||
- async
|
||||
|
||||
1
.github/CODEOWNERS
vendored
1
.github/CODEOWNERS
vendored
@@ -11,3 +11,4 @@
|
||||
/pkg/errors/ @grandwizard28
|
||||
/pkg/factory/ @grandwizard28
|
||||
/pkg/types/ @grandwizard28
|
||||
/pkg/sqlmigration/ @vikrantgupta25
|
||||
|
||||
1
.github/workflows/build-enterprise.yaml
vendored
1
.github/workflows/build-enterprise.yaml
vendored
@@ -69,6 +69,7 @@ jobs:
|
||||
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
|
||||
echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
|
||||
echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
|
||||
echo 'USERPILOT_KEY="${{ secrets.USERPILOT_KEY }}"' >> frontend/.env
|
||||
- name: cache-dotenv
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
|
||||
5
.github/workflows/build-staging.yaml
vendored
5
.github/workflows/build-staging.yaml
vendored
@@ -64,8 +64,9 @@ jobs:
|
||||
run: |
|
||||
mkdir -p frontend
|
||||
echo 'CI=1' > frontend/.env
|
||||
echo 'TUNNEL_URL=https://telemetry.staging.signoz.cloud/tunnel' >> frontend/.env
|
||||
echo 'TUNNEL_DOMAIN=https://telemetry.staging.signoz.cloud' >> frontend/.env
|
||||
echo 'TUNNEL_URL="${{ secrets.NP_TUNNEL_URL }}"' >> frontend/.env
|
||||
echo 'TUNNEL_DOMAIN="${{ secrets.NP_TUNNEL_DOMAIN }}"' >> frontend/.env
|
||||
echo 'USERPILOT_KEY="${{ secrets.NP_USERPILOT_KEY }}"' >> frontend/.env
|
||||
- name: cache-dotenv
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
|
||||
1
.github/workflows/gor-signoz.yaml
vendored
1
.github/workflows/gor-signoz.yaml
vendored
@@ -35,6 +35,7 @@ jobs:
|
||||
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> .env
|
||||
echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> .env
|
||||
echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> .env
|
||||
echo 'USERPILOT_KEY="${{ secrets.USERPILOT_KEY }}"' >> .env
|
||||
- name: build-frontend
|
||||
run: make js-build
|
||||
- name: upload-frontend-artifact
|
||||
|
||||
@@ -50,7 +50,7 @@ cache:
|
||||
# Time-to-live for cache entries in memory. Specify the duration in ns
|
||||
ttl: 60000000000
|
||||
# The interval at which the cache will be cleaned up
|
||||
cleanupInterval: 1m
|
||||
cleanup_interval: 1m
|
||||
# redis: Uses Redis as the caching backend.
|
||||
redis:
|
||||
# The hostname or IP address of the Redis server.
|
||||
|
||||
@@ -174,7 +174,7 @@ services:
|
||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||
signoz:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz:v0.80.0
|
||||
image: signoz/signoz:v0.81.0
|
||||
command:
|
||||
- --config=/root/config/prometheus.yml
|
||||
- --use-logs-new-schema=true
|
||||
@@ -208,7 +208,7 @@ services:
|
||||
retries: 3
|
||||
otel-collector:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-otel-collector:v0.111.39
|
||||
image: signoz/signoz-otel-collector:v0.111.40
|
||||
command:
|
||||
- --config=/etc/otel-collector-config.yaml
|
||||
- --manager-config=/etc/manager-config.yaml
|
||||
@@ -232,7 +232,7 @@ services:
|
||||
- signoz
|
||||
schema-migrator:
|
||||
!!merge <<: *common
|
||||
image: signoz/signoz-schema-migrator:v0.111.39
|
||||
image: signoz/signoz-schema-migrator:v0.111.40
|
||||
deploy:
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
|
||||
@@ -110,7 +110,7 @@ services:
|
||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||
signoz:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz:v0.80.0
|
||||
image: signoz/signoz:v0.81.0
|
||||
command:
|
||||
- --config=/root/config/prometheus.yml
|
||||
- --use-logs-new-schema=true
|
||||
@@ -143,7 +143,7 @@ services:
|
||||
retries: 3
|
||||
otel-collector:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-otel-collector:v0.111.39
|
||||
image: signoz/signoz-otel-collector:v0.111.40
|
||||
command:
|
||||
- --config=/etc/otel-collector-config.yaml
|
||||
- --manager-config=/etc/manager-config.yaml
|
||||
@@ -167,7 +167,7 @@ services:
|
||||
- signoz
|
||||
schema-migrator:
|
||||
!!merge <<: *common
|
||||
image: signoz/signoz-schema-migrator:v0.111.39
|
||||
image: signoz/signoz-schema-migrator:v0.111.40
|
||||
deploy:
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
|
||||
@@ -177,7 +177,7 @@ services:
|
||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||
signoz:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz:${VERSION:-v0.80.0}
|
||||
image: signoz/signoz:${VERSION:-v0.81.0}
|
||||
container_name: signoz
|
||||
command:
|
||||
- --config=/root/config/prometheus.yml
|
||||
@@ -212,7 +212,7 @@ services:
|
||||
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
|
||||
otel-collector:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.40}
|
||||
container_name: signoz-otel-collector
|
||||
command:
|
||||
- --config=/etc/otel-collector-config.yaml
|
||||
@@ -238,7 +238,7 @@ services:
|
||||
condition: service_healthy
|
||||
schema-migrator-sync:
|
||||
!!merge <<: *common
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.40}
|
||||
container_name: schema-migrator-sync
|
||||
command:
|
||||
- sync
|
||||
@@ -249,7 +249,7 @@ services:
|
||||
condition: service_healthy
|
||||
schema-migrator-async:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.40}
|
||||
container_name: schema-migrator-async
|
||||
command:
|
||||
- async
|
||||
|
||||
@@ -110,7 +110,7 @@ services:
|
||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||
signoz:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz:${VERSION:-v0.80.0}
|
||||
image: signoz/signoz:${VERSION:-v0.81.0}
|
||||
container_name: signoz
|
||||
command:
|
||||
- --config=/root/config/prometheus.yml
|
||||
@@ -144,7 +144,7 @@ services:
|
||||
retries: 3
|
||||
otel-collector:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.40}
|
||||
container_name: signoz-otel-collector
|
||||
command:
|
||||
- --config=/etc/otel-collector-config.yaml
|
||||
@@ -166,7 +166,7 @@ services:
|
||||
condition: service_healthy
|
||||
schema-migrator-sync:
|
||||
!!merge <<: *common
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.40}
|
||||
container_name: schema-migrator-sync
|
||||
command:
|
||||
- sync
|
||||
@@ -178,7 +178,7 @@ services:
|
||||
restart: on-failure
|
||||
schema-migrator-async:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.40}
|
||||
container_name: schema-migrator-async
|
||||
command:
|
||||
- async
|
||||
|
||||
@@ -93,7 +93,7 @@ check_os() {
|
||||
;;
|
||||
Red\ Hat*)
|
||||
desired_os=1
|
||||
os="red hat"
|
||||
os="rhel"
|
||||
package_manager="yum"
|
||||
;;
|
||||
CentOS*)
|
||||
|
||||
103
docs/contributing/go/errors.md
Normal file
103
docs/contributing/go/errors.md
Normal file
@@ -0,0 +1,103 @@
|
||||
# Errors
|
||||
|
||||
SigNoz includes its own structured [errors](/pkg/errors/errors.go) package. It's built on top of Go's `error` interface, extending it to add additional context that helps provide more meaningful error messages throughout the application.
|
||||
|
||||
## How to use it?
|
||||
|
||||
To use the SigNoz structured errors package, use these functions instead of the standard library alternatives:
|
||||
|
||||
```go
|
||||
// Instead of errors.New()
|
||||
errors.New(typ, code, message)
|
||||
|
||||
// Instead of fmt.Errorf()
|
||||
errors.Newf(typ, code, message, args...)
|
||||
```
|
||||
|
||||
### Typ
|
||||
The Typ (read as Type, defined as `typ`) is used to categorize errors across the codebase and is loosely coupled with HTTP/GRPC status codes. All predefined types can be found in [pkg/errors/type.go](/pkg/errors/type.go). For example:
|
||||
|
||||
- `TypeInvalidInput` - Indicates invalid input was provided
|
||||
- `TypeNotFound` - Indicates a resource was not found
|
||||
|
||||
By design, `typ` is unexported and cannot be declared outside of [errors](/pkg/errors/errors.go) package. This ensures that it is consistent across the codebase and is used in a way that is meaningful.
|
||||
|
||||
### Code
|
||||
Codes are used to provide more granular categorization within types. For instance, a type of `TypeInvalidInput` might have codes like `CodeInvalidEmail` or `CodeInvalidPassword`.
|
||||
|
||||
To create new error codes, use the `errors.MustNewCode` function:
|
||||
|
||||
```go
|
||||
var (
|
||||
CodeThingAlreadyExists = errors.MustNewCode("thing_already_exists")
|
||||
CodeThingNotFound = errors.MustNewCode("thing_not_found")
|
||||
)
|
||||
```
|
||||
|
||||
> 💡 **Note**: Error codes must match the regex `^[a-z_]+$` otherwise the code will panic.
|
||||
|
||||
## Show me some examples
|
||||
|
||||
### Using the error
|
||||
A basic example of using the error:
|
||||
|
||||
```go
|
||||
var (
|
||||
CodeThingAlreadyExists = errors.MustNewCode("thing_already_exists")
|
||||
)
|
||||
|
||||
func CreateThing(id string) error {
|
||||
t, err := thing.GetFromStore(id)
|
||||
if err != nil {
|
||||
if errors.As(err, errors.TypeNotFound) {
|
||||
// thing was not found, create it
|
||||
return thing.Create(id)
|
||||
}
|
||||
|
||||
// something else went wrong, wrap the error with more context
|
||||
return errors.Wrapf(err, errors.TypeInternal, errors.CodeUnknown, "failed to get thing from store")
|
||||
}
|
||||
|
||||
return errors.Newf(errors.TypeAlreadyExists, CodeThingAlreadyExists, "thing with id %s already exists", id)
|
||||
}
|
||||
```
|
||||
|
||||
### Changing the error
|
||||
Sometimes you may want to change the error while preserving the message:
|
||||
|
||||
```go
|
||||
func GetUserSecurely(id string) (*User, error) {
|
||||
user, err := repository.GetUser(id)
|
||||
if err != nil {
|
||||
if errors.Ast(err, errors.TypeNotFound) {
|
||||
// Convert NotFound to Forbidden for security reasons
|
||||
return nil, errors.New(errors.TypeForbidden, errors.CodeAccessDenied, "access denied to requested resource")
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
return user, nil
|
||||
}
|
||||
```
|
||||
|
||||
## Why do we need this?
|
||||
|
||||
In a large codebase like SigNoz, error handling is critical for maintaining reliability, debuggability, and a good user experience. We believe that it is the **responsibility of a function** to return **well-defined** errors that **accurately describe what went wrong**. With our structured error system:
|
||||
|
||||
- Functions can create precise errors with appropriate additional context
|
||||
- Callers can make informed decisions based on the additional context
|
||||
- Error context is preserved and enhanced as it moves up the call stack
|
||||
|
||||
The caller (which can be another function or a HTTP/gRPC handler or something else entirely), can then choose to use this error to take appropriate actions such as:
|
||||
|
||||
- A function can branch into different paths based on the context
|
||||
- An HTTP/gRPC handler can derive the correct status code and message from the error and send it to the client
|
||||
- Logging systems can capture structured error information for better diagnostics
|
||||
|
||||
Although there might be cases where this might seem too verbose, it makes the code more maintainable and consistent. A little verbose code is better than clever code that doesn't provide enough context.
|
||||
|
||||
## What should I remember?
|
||||
|
||||
- Think about error handling as you write your code, not as an afterthought.
|
||||
- Always use the [errors](/pkg/errors/errors.go) package instead of the standard library's `errors.New()` or `fmt.Errorf()`.
|
||||
- Always assign appropriate codes to errors when creating them instead of using the "catch all" error codes defined in [pkg/errors/code.go](/pkg/errors/code.go).
|
||||
- Use `errors.Wrapf()` to add context to errors while preserving the original when appropriate.
|
||||
11
docs/contributing/go/readme.md
Normal file
11
docs/contributing/go/readme.md
Normal file
@@ -0,0 +1,11 @@
|
||||
# Go
|
||||
|
||||
This document provides an overview of contributing to the SigNoz backend written in Go. The SigNoz backend is built with Go, focusing on performance, maintainability, and developer experience. We strive for clean, idiomatic code that follows established Go practices while addressing the unique needs of an observability platform.
|
||||
|
||||
We adhere to three primary style guides as our foundation:
|
||||
|
||||
- [Effective Go](https://go.dev/doc/effective_go) - For writing idiomatic Go code
|
||||
- [Code Review Comments](https://go.dev/wiki/CodeReviewComments) - For understanding common comments in code reviews
|
||||
- [Google Style Guide](https://google.github.io/styleguide/go/) - Additional practices from Google
|
||||
|
||||
We **recommend** (almost enforce) reviewing these guides before contributing to the codebase. They provide valuable insights into writing idiomatic Go code and will help you understand our approach to backend development. In addition, we have a few additional rules that make certain areas stricter than the above which can be found in area-specific files in this package.
|
||||
@@ -5,6 +5,7 @@ import (
|
||||
|
||||
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type DailyProvider struct {
|
||||
@@ -37,7 +38,7 @@ func NewDailyProvider(opts ...GenericProviderOption[*DailyProvider]) *DailyProvi
|
||||
return dp
|
||||
}
|
||||
|
||||
func (p *DailyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
|
||||
func (p *DailyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
|
||||
req.Seasonality = SeasonalityDaily
|
||||
return p.getAnomalies(ctx, req)
|
||||
return p.getAnomalies(ctx, orgID, req)
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ import (
|
||||
|
||||
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type HourlyProvider struct {
|
||||
@@ -37,7 +38,7 @@ func NewHourlyProvider(opts ...GenericProviderOption[*HourlyProvider]) *HourlyPr
|
||||
return hp
|
||||
}
|
||||
|
||||
func (p *HourlyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
|
||||
func (p *HourlyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
|
||||
req.Seasonality = SeasonalityHourly
|
||||
return p.getAnomalies(ctx, req)
|
||||
return p.getAnomalies(ctx, orgID, req)
|
||||
}
|
||||
|
||||
@@ -2,8 +2,10 @@ package anomaly
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type Provider interface {
|
||||
GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error)
|
||||
GetAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error)
|
||||
}
|
||||
|
||||
@@ -5,11 +5,12 @@ import (
|
||||
"math"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache"
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/postprocess"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
@@ -59,9 +60,9 @@ func (p *BaseSeasonalProvider) getQueryParams(req *GetAnomaliesRequest) *anomaly
|
||||
return prepareAnomalyQueryParams(req.Params, req.Seasonality)
|
||||
}
|
||||
|
||||
func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQueryParams) (*anomalyQueryResults, error) {
|
||||
func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID, params *anomalyQueryParams) (*anomalyQueryResults, error) {
|
||||
zap.L().Info("fetching results for current period", zap.Any("currentPeriodQuery", params.CurrentPeriodQuery))
|
||||
currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentPeriodQuery)
|
||||
currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.CurrentPeriodQuery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -72,7 +73,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQu
|
||||
}
|
||||
|
||||
zap.L().Info("fetching results for past period", zap.Any("pastPeriodQuery", params.PastPeriodQuery))
|
||||
pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.PastPeriodQuery)
|
||||
pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.PastPeriodQuery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -83,7 +84,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQu
|
||||
}
|
||||
|
||||
zap.L().Info("fetching results for current season", zap.Any("currentSeasonQuery", params.CurrentSeasonQuery))
|
||||
currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentSeasonQuery)
|
||||
currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.CurrentSeasonQuery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -94,7 +95,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQu
|
||||
}
|
||||
|
||||
zap.L().Info("fetching results for past season", zap.Any("pastSeasonQuery", params.PastSeasonQuery))
|
||||
pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.PastSeasonQuery)
|
||||
pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.PastSeasonQuery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -105,7 +106,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQu
|
||||
}
|
||||
|
||||
zap.L().Info("fetching results for past 2 season", zap.Any("past2SeasonQuery", params.Past2SeasonQuery))
|
||||
past2SeasonResults, _, err := p.querierV2.QueryRange(ctx, params.Past2SeasonQuery)
|
||||
past2SeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.Past2SeasonQuery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -116,7 +117,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQu
|
||||
}
|
||||
|
||||
zap.L().Info("fetching results for past 3 season", zap.Any("past3SeasonQuery", params.Past3SeasonQuery))
|
||||
past3SeasonResults, _, err := p.querierV2.QueryRange(ctx, params.Past3SeasonQuery)
|
||||
past3SeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.Past3SeasonQuery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -335,9 +336,9 @@ func (p *BaseSeasonalProvider) getAnomalyScores(
|
||||
return anomalyScoreSeries
|
||||
}
|
||||
|
||||
func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
|
||||
func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
|
||||
anomalyParams := p.getQueryParams(req)
|
||||
anomalyQueryResults, err := p.getResults(ctx, anomalyParams)
|
||||
anomalyQueryResults, err := p.getResults(ctx, orgID, anomalyParams)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ import (
|
||||
|
||||
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type WeeklyProvider struct {
|
||||
@@ -36,7 +37,7 @@ func NewWeeklyProvider(opts ...GenericProviderOption[*WeeklyProvider]) *WeeklyPr
|
||||
return wp
|
||||
}
|
||||
|
||||
func (p *WeeklyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
|
||||
func (p *WeeklyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
|
||||
req.Seasonality = SeasonalityWeekly
|
||||
return p.getAnomalies(ctx, req)
|
||||
return p.getAnomalies(ctx, orgID, req)
|
||||
}
|
||||
|
||||
@@ -13,11 +13,12 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/apis/fields"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
"github.com/SigNoz/signoz/pkg/modules/quickfilter"
|
||||
quickfilterscore "github.com/SigNoz/signoz/pkg/modules/quickfilter/core"
|
||||
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache"
|
||||
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
|
||||
@@ -38,7 +39,6 @@ type APIHandlerOptions struct {
|
||||
IntegrationsController *integrations.Controller
|
||||
CloudIntegrationsController *cloudintegrations.Controller
|
||||
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
|
||||
Cache cache.Cache
|
||||
Gateway *httputil.ReverseProxy
|
||||
GatewayUrl string
|
||||
// Querier Influx Interval
|
||||
@@ -55,6 +55,8 @@ type APIHandler struct {
|
||||
|
||||
// NewAPIHandler returns an APIHandler
|
||||
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
|
||||
quickfiltermodule := quickfilterscore.NewQuickFilters(quickfilterscore.NewStore(signoz.SQLStore))
|
||||
quickFilter := quickfilter.NewAPI(quickfiltermodule)
|
||||
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
|
||||
Reader: opts.DataConnector,
|
||||
PreferSpanMetrics: opts.PreferSpanMetrics,
|
||||
@@ -64,11 +66,12 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
|
||||
IntegrationsController: opts.IntegrationsController,
|
||||
CloudIntegrationsController: opts.CloudIntegrationsController,
|
||||
LogsParsingPipelineController: opts.LogsParsingPipelineController,
|
||||
Cache: opts.Cache,
|
||||
FluxInterval: opts.FluxInterval,
|
||||
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
|
||||
FieldsAPI: fields.NewAPI(signoz.TelemetryStore),
|
||||
Signoz: signoz,
|
||||
QuickFilters: quickFilter,
|
||||
QuickFilterModule: quickfiltermodule,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
|
||||
@@ -134,7 +134,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
_, registerError := baseauth.Register(ctx, req, ah.Signoz.Alertmanager, ah.Signoz.Modules.Organization)
|
||||
_, registerError := baseauth.Register(ctx, req, ah.Signoz.Alertmanager, ah.Signoz.Modules.Organization, ah.QuickFilterModule)
|
||||
if !registerError.IsNil() {
|
||||
RespondError(w, apierr, nil)
|
||||
return
|
||||
|
||||
@@ -119,7 +119,7 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
req.UpdatedByUserID = claims.UserID
|
||||
req.UpdatedAt = time.Now()
|
||||
zap.L().Info("Got UpdateSteps PAT request", zap.Any("pat", req))
|
||||
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
|
||||
var apierr basemodel.BaseApiError
|
||||
if apierr = ah.AppDao().UpdatePAT(r.Context(), claims.OrgID, req, id); apierr != nil {
|
||||
RespondError(w, apierr, nil)
|
||||
|
||||
@@ -7,14 +7,27 @@ import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/anomaly"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
bodyBytes, _ := io.ReadAll(r.Body)
|
||||
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
|
||||
@@ -29,7 +42,7 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
|
||||
queryRangeParams.Version = "v4"
|
||||
|
||||
// add temporality for each metric
|
||||
temporalityErr := aH.PopulateTemporality(r.Context(), queryRangeParams)
|
||||
temporalityErr := aH.PopulateTemporality(r.Context(), orgID, queryRangeParams)
|
||||
if temporalityErr != nil {
|
||||
zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr))
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
|
||||
@@ -85,30 +98,30 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
|
||||
switch seasonality {
|
||||
case anomaly.SeasonalityWeekly:
|
||||
provider = anomaly.NewWeeklyProvider(
|
||||
anomaly.WithCache[*anomaly.WeeklyProvider](aH.opts.Cache),
|
||||
anomaly.WithCache[*anomaly.WeeklyProvider](aH.Signoz.Cache),
|
||||
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
|
||||
anomaly.WithReader[*anomaly.WeeklyProvider](aH.opts.DataConnector),
|
||||
)
|
||||
case anomaly.SeasonalityDaily:
|
||||
provider = anomaly.NewDailyProvider(
|
||||
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
|
||||
anomaly.WithCache[*anomaly.DailyProvider](aH.Signoz.Cache),
|
||||
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
|
||||
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
|
||||
)
|
||||
case anomaly.SeasonalityHourly:
|
||||
provider = anomaly.NewHourlyProvider(
|
||||
anomaly.WithCache[*anomaly.HourlyProvider](aH.opts.Cache),
|
||||
anomaly.WithCache[*anomaly.HourlyProvider](aH.Signoz.Cache),
|
||||
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
|
||||
anomaly.WithReader[*anomaly.HourlyProvider](aH.opts.DataConnector),
|
||||
)
|
||||
default:
|
||||
provider = anomaly.NewDailyProvider(
|
||||
anomaly.WithCache[*anomaly.DailyProvider](aH.opts.Cache),
|
||||
anomaly.WithCache[*anomaly.DailyProvider](aH.Signoz.Cache),
|
||||
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
|
||||
anomaly.WithReader[*anomaly.DailyProvider](aH.opts.DataConnector),
|
||||
)
|
||||
}
|
||||
anomalies, err := provider.GetAnomalies(r.Context(), &anomaly.GetAnomaliesRequest{Params: queryRangeParams})
|
||||
anomalies, err := provider.GetAnomalies(r.Context(), orgID, &anomaly.GetAnomaliesRequest{Params: queryRangeParams})
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
|
||||
"github.com/SigNoz/signoz/ee/query-service/rules"
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
@@ -41,7 +42,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
|
||||
opAmpModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache"
|
||||
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
|
||||
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
@@ -57,7 +57,6 @@ type ServerOptions struct {
|
||||
HTTPHostPort string
|
||||
PrivateHostPort string
|
||||
PreferSpanMetrics bool
|
||||
CacheConfigPath string
|
||||
FluxInterval string
|
||||
FluxIntervalForTraceDetail string
|
||||
Cluster string
|
||||
@@ -134,19 +133,10 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
serverOptions.SigNoz.Cache,
|
||||
)
|
||||
|
||||
var c cache.Cache
|
||||
if serverOptions.CacheConfigPath != "" {
|
||||
cacheOpts, err := cache.LoadFromYAMLCacheConfigFile(serverOptions.CacheConfigPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
c = cache.NewCache(cacheOpts)
|
||||
}
|
||||
|
||||
rm, err := makeRulesManager(
|
||||
serverOptions.SigNoz.SQLStore.SQLxDB(),
|
||||
reader,
|
||||
c,
|
||||
serverOptions.SigNoz.Cache,
|
||||
serverOptions.SigNoz.Alertmanager,
|
||||
serverOptions.SigNoz.SQLStore,
|
||||
serverOptions.SigNoz.TelemetryStore,
|
||||
@@ -223,7 +213,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
IntegrationsController: integrationsController,
|
||||
CloudIntegrationsController: cloudIntegrationsController,
|
||||
LogsParsingPipelineController: logParsingPipelineController,
|
||||
Cache: c,
|
||||
FluxInterval: fluxInterval,
|
||||
Gateway: gatewayProxy,
|
||||
GatewayUrl: serverOptions.GatewayUrl,
|
||||
@@ -261,9 +250,15 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
&opAmpModel.AllAgents, agentConfMgr,
|
||||
)
|
||||
|
||||
errorList := reader.PreloadMetricsMetadata(context.Background())
|
||||
for _, er := range errorList {
|
||||
zap.L().Error("failed to preload metrics metadata", zap.Error(er))
|
||||
orgs, err := apiHandler.Signoz.Modules.Organization.GetAll(context.Background())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, org := range orgs {
|
||||
errorList := reader.PreloadMetricsMetadata(context.Background(), org.ID)
|
||||
for _, er := range errorList {
|
||||
zap.L().Error("failed to preload metrics metadata", zap.Error(er))
|
||||
}
|
||||
}
|
||||
|
||||
return s, nil
|
||||
@@ -327,7 +322,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
|
||||
apiHandler.RegisterMessagingQueuesRoutes(r, am)
|
||||
apiHandler.RegisterThirdPartyApiRoutes(r, am)
|
||||
apiHandler.MetricExplorerRoutes(r, am)
|
||||
apiHandler.RegisterTraceFunnelsRoutes(r, am)
|
||||
|
||||
c := cors.New(cors.Options{
|
||||
AllowedOrigins: []string{"*"},
|
||||
|
||||
@@ -72,6 +72,7 @@ func main() {
|
||||
flag.DurationVar(&dialTimeout, "dial-timeout", 5*time.Second, "(the maximum time to establish a connection.)")
|
||||
// Deprecated
|
||||
flag.StringVar(&ruleRepoURL, "rules.repo-url", baseconst.AlertHelpPage, "(host address used to build rule link in alert messages)")
|
||||
// Deprecated
|
||||
flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)")
|
||||
flag.StringVar(&fluxInterval, "flux-interval", "5m", "(the interval to exclude data from being cached to avoid incorrect cache for data in motion)")
|
||||
flag.StringVar(&fluxIntervalForTraceDetail, "flux-interval-trace-detail", "2m", "(the interval to exclude data from being cached to avoid incorrect cache for trace data in motion)")
|
||||
@@ -138,7 +139,6 @@ func main() {
|
||||
HTTPHostPort: baseconst.HTTPHostPort,
|
||||
PreferSpanMetrics: preferSpanMetrics,
|
||||
PrivateHostPort: baseconst.PrivateHostPort,
|
||||
CacheConfigPath: cacheConfigPath,
|
||||
FluxInterval: fluxInterval,
|
||||
FluxIntervalForTraceDetail: fluxIntervalForTraceDetail,
|
||||
Cluster: cluster,
|
||||
|
||||
@@ -12,10 +12,11 @@ import (
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/anomaly"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache"
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/common"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
|
||||
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
@@ -53,6 +54,7 @@ type AnomalyRule struct {
|
||||
|
||||
func NewAnomalyRule(
|
||||
id string,
|
||||
orgID valuer.UUID,
|
||||
p *ruletypes.PostableRule,
|
||||
reader interfaces.Reader,
|
||||
cache cache.Cache,
|
||||
@@ -66,7 +68,7 @@ func NewAnomalyRule(
|
||||
p.RuleCondition.Target = &target
|
||||
}
|
||||
|
||||
baseRule, err := baserules.NewBaseRule(id, p, reader, opts...)
|
||||
baseRule, err := baserules.NewBaseRule(id, orgID, p, reader, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -158,18 +160,18 @@ func (r *AnomalyRule) GetSelectedQuery() string {
|
||||
return r.Condition().GetSelectedQueryName()
|
||||
}
|
||||
|
||||
func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletypes.Vector, error) {
|
||||
func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {
|
||||
|
||||
params, err := r.prepareQueryRange(ts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
err = r.PopulateTemporality(ctx, params)
|
||||
err = r.PopulateTemporality(ctx, orgID, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("internal error while setting temporality")
|
||||
}
|
||||
|
||||
anomalies, err := r.provider.GetAnomalies(ctx, &anomaly.GetAnomaliesRequest{
|
||||
anomalies, err := r.provider.GetAnomalies(ctx, orgID, &anomaly.GetAnomaliesRequest{
|
||||
Params: params,
|
||||
Seasonality: r.seasonality,
|
||||
})
|
||||
@@ -204,7 +206,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
|
||||
prevState := r.State()
|
||||
|
||||
valueFormatter := formatter.FromUnit(r.Unit())
|
||||
res, err := r.buildAndRunQuery(ctx, ts)
|
||||
res, err := r.buildAndRunQuery(ctx, r.OrgID(), ts)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -297,7 +299,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
|
||||
// alerts[h] is ready, add or update active list now
|
||||
for h, a := range alerts {
|
||||
// Check whether we already have alerting state for the identifying label set.
|
||||
// UpdateSteps the last value and annotations if so, create a new alert entry otherwise.
|
||||
// Update the last value and annotations if so, create a new alert entry otherwise.
|
||||
if alert, ok := r.Active[h]; ok && alert.State != model.StateInactive {
|
||||
|
||||
alert.Value = a.Value
|
||||
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/google/uuid"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
@@ -23,6 +24,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
||||
// create a threshold rule
|
||||
tr, err := baserules.NewThresholdRule(
|
||||
ruleId,
|
||||
opts.OrgID,
|
||||
opts.Rule,
|
||||
opts.Reader,
|
||||
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
|
||||
@@ -43,6 +45,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
||||
// create promql rule
|
||||
pr, err := baserules.NewPromRule(
|
||||
ruleId,
|
||||
opts.OrgID,
|
||||
opts.Rule,
|
||||
opts.Logger,
|
||||
opts.Reader,
|
||||
@@ -63,6 +66,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
||||
// create anomaly rule
|
||||
ar, err := NewAnomalyRule(
|
||||
ruleId,
|
||||
opts.OrgID,
|
||||
opts.Rule,
|
||||
opts.Reader,
|
||||
opts.Cache,
|
||||
@@ -119,6 +123,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
||||
// create a threshold rule
|
||||
rule, err = baserules.NewThresholdRule(
|
||||
alertname,
|
||||
opts.OrgID,
|
||||
parsedRule,
|
||||
opts.Reader,
|
||||
baserules.WithSendAlways(),
|
||||
@@ -127,7 +132,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("failed to prepare a new threshold rule for test", zap.String("name", rule.Name()), zap.Error(err))
|
||||
zap.L().Error("failed to prepare a new threshold rule for test", zap.String("name", alertname), zap.Error(err))
|
||||
return 0, basemodel.BadRequest(err)
|
||||
}
|
||||
|
||||
@@ -136,6 +141,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
||||
// create promql rule
|
||||
rule, err = baserules.NewPromRule(
|
||||
alertname,
|
||||
opts.OrgID,
|
||||
parsedRule,
|
||||
opts.Logger,
|
||||
opts.Reader,
|
||||
@@ -146,13 +152,14 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", rule.Name()), zap.Error(err))
|
||||
zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", alertname), zap.Error(err))
|
||||
return 0, basemodel.BadRequest(err)
|
||||
}
|
||||
} else if parsedRule.RuleType == ruletypes.RuleTypeAnomaly {
|
||||
// create anomaly rule
|
||||
rule, err = NewAnomalyRule(
|
||||
alertname,
|
||||
opts.OrgID,
|
||||
parsedRule,
|
||||
opts.Reader,
|
||||
opts.Cache,
|
||||
@@ -161,7 +168,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
||||
baserules.WithSQLStore(opts.SQLStore),
|
||||
)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to prepare a new anomaly rule for test", zap.String("name", rule.Name()), zap.Error(err))
|
||||
zap.L().Error("failed to prepare a new anomaly rule for test", zap.String("name", alertname), zap.Error(err))
|
||||
return 0, basemodel.BadRequest(err)
|
||||
}
|
||||
} else {
|
||||
@@ -187,7 +194,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
||||
|
||||
// newTask returns an appropriate group for
|
||||
// rule type
|
||||
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID string) baserules.Task {
|
||||
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) baserules.Task {
|
||||
if taskType == baserules.TaskTypeCh {
|
||||
return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID)
|
||||
}
|
||||
|
||||
39
frontend/public/Logos/datadog.svg
Normal file
39
frontend/public/Logos/datadog.svg
Normal file
@@ -0,0 +1,39 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 23.0.4, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 800.5 907.77" style="enable-background:new 0 0 800.5 907.77;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#FFFFFF;}
|
||||
</style>
|
||||
<path class="st0" d="M303.36,238.61c31.36-21.37,71.76-12.97,65-6.53c-12.89,12.28,4.26,8.65,6.11,31.31
|
||||
c1.36,16.69-4.09,25.88-8.78,31.11c-9.79,1.28-21.69,3.67-36.02,8.33c-8.48,2.76-15.85,5.82-22.31,8.9
|
||||
c-1.7-1.11-3.55-2.47-5.74-4.36C279.5,288.19,280.24,254.37,303.36,238.61 M490.68,370.72c5.69-4.41,31.55-12.72,55.49-15.55
|
||||
c12.57-1.48,30.49-2.34,34.31-0.2c7.59,4.19,7.59,17.16,2.39,29.14c-7.57,17.4-18.27,36.63-30.39,38.21
|
||||
c-19.77,2.61-38.46-8.09-59.8-24.03C485.06,392.56,480.38,378.68,490.68,370.72 M526.75,201.27c29.19,13.58,25.37,39.42,26.18,54.6
|
||||
c0.22,4.36,0.15,7.3-0.22,9.32c-4.04-2.19-10.43-3.8-20.56-3.35c-2.96,0.12-5.84,0.47-8.63,0.91c-10.77-5.77-17.21-17.06-23.1-29.06
|
||||
c-0.54-1.11-0.96-2.1-1.36-3.06c-0.17-0.44-0.35-0.91-0.52-1.31c-0.07-0.22-0.12-0.39-0.2-0.59c-3.23-10.25-1.06-12.3,0.3-15.46
|
||||
c1.41-3.23,6.68-5.89-1.11-8.58c-0.67-0.25-1.5-0.39-2.44-0.57C500.25,197.72,515.7,196.17,526.75,201.27 M367.62,510.22
|
||||
c-31.45-20.19-63.99-49.15-78.22-65.18c-2.39-1.8-2-9.79-2-9.79c12.84,9.98,66.11,48.04,122.44,65.42
|
||||
c19.87,6.14,50.36,8.46,76.81-6.53c20.21-11.46,44.54-31.43,59.06-52.01l2.66,4.61c-0.1,3.06-6.78,17.97-10.18,23.96
|
||||
c6.14,3.53,10.72,4.49,17.55,6.36l46.64-7.27c16.74-27.04,28.74-70.65,15.95-112.16c-7.3-23.81-45.36-71.22-48.09-73.83
|
||||
c-9.56-9.19,1.6-44.69-17.35-83.42C532.86,159.41,480.67,116.69,458,98.1c6.68,4.88,47.82,21.47,67,44.62
|
||||
c1.8-2.39,2.54-14.82,4.19-17.97c-16.47-21.57-17.75-59.95-17.75-70.21c0-18.81-9.56-40.13-9.56-40.13s16.47,13.04,20.73,35.5
|
||||
c5.03,26.6,15.75,47.55,29.93,65.28c26.84,33.43,51.08,50.58,63.33,38.23C630.53,138.58,601,72.2,563.28,35.15
|
||||
C519.25-8.09,507.74-2.52,481.91,6.7c-20.61,7.35-31.75,65.87-85.47,64.71c-9.1-1.06-32.54-1.63-44.13-1.53
|
||||
c6.04-8.43,11.22-14.94,11.22-14.94s-18.02,7.25-33.38,16.44l-1.18-1.77c5.18-10.92,10.75-17.82,10.75-17.82s-14.4,8.65-27.54,19.01
|
||||
c2.39-13.02,11.44-21.27,11.44-21.27s-18.19,3.28-41.36,28.77c-26.33,7.2-32.66,11.93-53.64,21.22
|
||||
c-34.12-7.44-50.21-19.45-65.55-41.56c-11.68-16.89-32.47-19.45-53.71-10.72c-30.97,12.8-70.14,30.33-70.14,30.33
|
||||
s12.77-0.52,26.08,0.05c-18.22,6.9-35.72,16.39-35.72,16.39s8.53-0.3,19.06-0.12c-7.27,6.04-11.29,8.92-18.22,13.51
|
||||
c-16.66,12.1-30.17,26.08-30.17,26.08s11.31-5.15,21.47-8.04c-7.1,16.27-21.18,28.25-18.59,48.17
|
||||
c2.49,18.19,24.82,55.66,53.64,78.66c2.49,2,41.86,38.43,71.56,23.47c29.68-14.94,41.39-28.25,46.27-48.66
|
||||
c5.74-23.44,2.47-41.17-9.79-92.05c-4.04-16.79-14.57-51.37-19.65-67.91l1.13-0.81c9.71,20.49,34.56,74.5,44.57,110.78
|
||||
c15.63,56.57,10.75,85.27,3.6,95.79c-21.57,31.73-76.84,35.92-101.98,18.34c-3.85,60.91,9.76,87.73,14.37,101.24
|
||||
c-2.29,15.53,7.77,44.37,7.77,44.37s1.13-13.11,5.74-20.02c1.23,15.41,9,33.72,9,33.72s-0.47-11.31,3.06-21.08
|
||||
c4.98,8.43,8.63,10.43,13.34,16.76c4.71,16.47,14.15,28.5,14.15,28.5s-1.53-8.83-0.69-18.02c23.05,22.14,27.02,54.45,29.31,79.28
|
||||
c6.46,68.26-107.63,122.54-129.74,165.24c-16.76,25.29-26.8,65.3,1.58,88.89c68.6,56.97,42.25,72.65,76.59,97.69
|
||||
c47.11,34.34,106.05,18.96,126.11-8.97c27.93-38.92,20.76-75.63,10.38-109.97c-8.11-26.85-30.15-71.46-57.41-88.72
|
||||
c-27.86-17.65-54.95-20.95-77.9-18.59l2.12-2.44c33.01-6.56,67.52-2.96,92.49,13.14c28.35,18.22,54.28,49.47,67.84,97.37
|
||||
c15.38-2.19,17.55-3.18,31.63-5.18l-31.7-246.76L367.62,510.22z M385.94,819.52l-3.65-34.22l71.29-108.74l80.93,23.64l69.59-116.23
|
||||
L687.52,639l63.38-132.92l22.53,242.07L385.94,819.52z M774.27,456.51l-254.72,46.17c-6.31,8.13-21.91,22.41-29.41,26.13
|
||||
c-32.17,16.2-53.91,11.51-72.7,6.63c-12.08-3.06-19.08-4.78-29.11-9.29l-62.17,8.53l37.74,314.87l436.35-78.66L774.27,456.51z"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 3.8 KiB |
70
frontend/public/Logos/grafana.svg
Normal file
70
frontend/public/Logos/grafana.svg
Normal file
@@ -0,0 +1,70 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 21.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 142.5 145.6" style="enable-background:new 0 0 142.5 145.6;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#565656;}
|
||||
.st1{fill:url(#SVGID_1_);}
|
||||
</style>
|
||||
<g>
|
||||
<path class="st0" d="M28.7,131.5c-0.3,7.9-6.6,14.1-14.4,14.1C6.1,145.6,0,139,0,130.9s6.6-14.7,14.7-14.7c3.6,0,7.2,1.6,10.2,4.4
|
||||
l-2.3,2.9c-2.3-2-5.1-3.4-7.9-3.4c-5.9,0-10.8,4.8-10.8,10.8c0,6.1,4.6,10.8,10.4,10.8c5.2,0,9.3-3.8,10.2-8.8H12.6v-3.5h16.1
|
||||
V131.5z"/>
|
||||
<path class="st0" d="M42.3,129.5h-2.2c-2.4,0-4.4,2-4.4,4.4v11.4h-3.9v-19.6H35v1.6c1.1-1.1,2.7-1.6,4.6-1.6h4.2L42.3,129.5z"/>
|
||||
<path class="st0" d="M63.7,145.3h-3.4v-2.5c-2.6,2.5-6.6,3.7-10.7,1.9c-3-1.3-5.3-4.1-5.9-7.4c-1.2-6.3,3.7-11.9,9.9-11.9
|
||||
c2.6,0,5,1.1,6.7,2.8v-2.5h3.4V145.3z M59.7,137c0.9-4-2.1-7.6-6-7.6c-3.4,0-6.1,2.8-6.1,6.1c0,3.8,3.3,6.7,7.2,6.1
|
||||
C57.1,141.2,59.1,139.3,59.7,137z"/>
|
||||
<path class="st0" d="M71.5,124.7v1.1h6.2v3.4h-6.2v16.1h-3.8v-20.5c0-4.3,3.1-6.8,7-6.8h4.7l-1.6,3.7h-3.1
|
||||
C72.9,121.6,71.5,123,71.5,124.7z"/>
|
||||
<path class="st0" d="M98.5,145.3h-3.3v-2.5c-2.6,2.5-6.6,3.7-10.7,1.9c-3-1.3-5.3-4.1-5.9-7.4c-1.2-6.3,3.7-11.9,9.9-11.9
|
||||
c2.6,0,5,1.1,6.7,2.8v-2.5h3.4v19.6H98.5z M94.5,137c0.9-4-2.1-7.6-6-7.6c-3.4,0-6.1,2.8-6.1,6.1c0,3.8,3.3,6.7,7.2,6.1
|
||||
C92,141.2,93.9,139.3,94.5,137z"/>
|
||||
<path class="st0" d="M119.4,133.8v11.5h-3.9v-11.6c0-2.4-2-4.4-4.4-4.4c-2.5,0-4.4,2-4.4,4.4v11.6h-3.9v-19.6h3.2v1.7
|
||||
c1.4-1.3,3.3-2,5.2-2C115.8,125.5,119.4,129.2,119.4,133.8z"/>
|
||||
<path class="st0" d="M142.4,145.3h-3.3v-2.5c-2.6,2.5-6.6,3.7-10.7,1.9c-3-1.3-5.3-4.1-5.9-7.4c-1.2-6.3,3.7-11.9,9.9-11.9
|
||||
c2.6,0,5,1.1,6.7,2.8v-2.5h3.4v19.6H142.4z M138.4,137c0.9-4-2.1-7.6-6-7.6c-3.4,0-6.1,2.8-6.1,6.1c0,3.8,3.3,6.7,7.2,6.1
|
||||
C135.9,141.2,137.8,139.3,138.4,137z"/>
|
||||
</g>
|
||||
<linearGradient id="SVGID_1_" gradientUnits="userSpaceOnUse" x1="71.25" y1="10.4893" x2="71.25" y2="113.3415" gradientTransform="matrix(1 0 0 -1 0 148.6)">
|
||||
<stop offset="0" style="stop-color:#FCEE1F"/>
|
||||
<stop offset="1" style="stop-color:#F15B2A"/>
|
||||
</linearGradient>
|
||||
<path class="st1" d="M122.9,49.9c-0.2-1.9-0.5-4.1-1.1-6.5c-0.6-2.4-1.6-5-2.9-7.8c-1.4-2.7-3.1-5.6-5.4-8.3
|
||||
c-0.9-1.1-1.9-2.1-2.9-3.2c1.6-6.3-1.9-11.8-1.9-11.8c-6.1-0.4-9.9,1.9-11.3,2.9c-0.2-0.1-0.5-0.2-0.7-0.3c-1-0.4-2.1-0.8-3.2-1.2
|
||||
c-1.1-0.3-2.2-0.7-3.3-0.9c-1.1-0.3-2.3-0.5-3.5-0.7c-0.2,0-0.4-0.1-0.6-0.1C83.5,3.6,75.9,0,75.9,0c-8.7,5.6-10.4,13.1-10.4,13.1
|
||||
s0,0.2-0.1,0.4c-0.5,0.1-0.9,0.3-1.4,0.4c-0.6,0.2-1.3,0.4-1.9,0.7c-0.6,0.3-1.3,0.5-1.9,0.8c-1.3,0.6-2.5,1.2-3.8,1.9
|
||||
c-1.2,0.7-2.4,1.4-3.5,2.2c-0.2-0.1-0.3-0.2-0.3-0.2c-11.7-4.5-22.1,0.9-22.1,0.9c-0.9,12.5,4.7,20.3,5.8,21.7
|
||||
c-0.3,0.8-0.5,1.5-0.8,2.3c-0.9,2.8-1.5,5.7-1.9,8.7c-0.1,0.4-0.1,0.9-0.2,1.3c-10.8,5.3-14,16.3-14,16.3c9,10.4,19.6,11,19.6,11
|
||||
l0,0c1.3,2.4,2.9,4.7,4.6,6.8c0.7,0.9,1.5,1.7,2.3,2.6c-3.3,9.4,0.5,17.3,0.5,17.3c10.1,0.4,16.7-4.4,18.1-5.5c1,0.3,2,0.6,3,0.9
|
||||
c3.1,0.8,6.3,1.3,9.4,1.4c0.8,0,1.6,0,2.4,0h0.4H80h0.5H81l0,0c4.7,6.8,13.1,7.7,13.1,7.7c5.9-6.3,6.3-12.4,6.3-13.8l0,0
|
||||
c0,0,0,0,0-0.1s0-0.2,0-0.2l0,0c0-0.1,0-0.2,0-0.3c1.2-0.9,2.4-1.8,3.6-2.8c2.4-2.1,4.4-4.6,6.2-7.2c0.2-0.2,0.3-0.5,0.5-0.7
|
||||
c6.7,0.4,11.4-4.2,11.4-4.2c-1.1-7-5.1-10.4-5.9-11l0,0c0,0,0,0-0.1-0.1l-0.1-0.1l0,0l-0.1-0.1c0-0.4,0.1-0.8,0.1-1.3
|
||||
c0.1-0.8,0.1-1.5,0.1-2.3v-0.6v-0.3v-0.1c0-0.2,0-0.1,0-0.2v-0.5v-0.6c0-0.2,0-0.4,0-0.6s0-0.4-0.1-0.6l-0.1-0.6l-0.1-0.6
|
||||
c-0.1-0.8-0.3-1.5-0.4-2.3c-0.7-3-1.9-5.9-3.4-8.4c-1.6-2.6-3.5-4.8-5.7-6.8c-2.2-1.9-4.6-3.5-7.2-4.6c-2.6-1.2-5.2-1.9-7.9-2.2
|
||||
c-1.3-0.2-2.7-0.2-4-0.2h-0.5h-0.1h-0.2h-0.2h-0.5c-0.2,0-0.4,0-0.5,0c-0.7,0.1-1.4,0.2-2,0.3c-2.7,0.5-5.2,1.5-7.4,2.8
|
||||
c-2.2,1.3-4.1,3-5.7,4.9s-2.8,3.9-3.6,6.1c-0.8,2.1-1.3,4.4-1.4,6.5c0,0.5,0,1.1,0,1.6c0,0.1,0,0.3,0,0.4v0.4c0,0.3,0,0.5,0.1,0.8
|
||||
c0.1,1.1,0.3,2.1,0.6,3.1c0.6,2,1.5,3.8,2.7,5.4s2.5,2.8,4,3.8s3,1.7,4.6,2.2c1.6,0.5,3.1,0.7,4.5,0.6c0.2,0,0.4,0,0.5,0
|
||||
c0.1,0,0.2,0,0.3,0s0.2,0,0.3,0c0.2,0,0.3,0,0.5,0h0.1h0.1c0.1,0,0.2,0,0.3,0c0.2,0,0.4-0.1,0.5-0.1c0.2,0,0.3-0.1,0.5-0.1
|
||||
c0.3-0.1,0.7-0.2,1-0.3c0.6-0.2,1.2-0.5,1.8-0.7c0.6-0.3,1.1-0.6,1.5-0.9c0.1-0.1,0.3-0.2,0.4-0.3c0.5-0.4,0.6-1.1,0.2-1.6
|
||||
c-0.4-0.4-1-0.5-1.5-0.3C88,74,87.9,74,87.7,74.1c-0.4,0.2-0.9,0.4-1.3,0.5c-0.5,0.1-1,0.3-1.5,0.4c-0.3,0-0.5,0.1-0.8,0.1
|
||||
c-0.1,0-0.3,0-0.4,0c-0.1,0-0.3,0-0.4,0s-0.3,0-0.4,0c-0.2,0-0.3,0-0.5,0c0,0-0.1,0,0,0h-0.1h-0.1c-0.1,0-0.1,0-0.2,0
|
||||
s-0.3,0-0.4-0.1c-1.1-0.2-2.3-0.5-3.4-1c-1.1-0.5-2.2-1.2-3.1-2.1c-1-0.9-1.8-1.9-2.5-3.1c-0.7-1.2-1.1-2.5-1.3-3.8
|
||||
c-0.1-0.7-0.2-1.4-0.1-2.1c0-0.2,0-0.4,0-0.6c0,0.1,0,0,0,0v-0.1v-0.1c0-0.1,0-0.2,0-0.3c0-0.4,0.1-0.7,0.2-1.1c0.5-3,2-5.9,4.3-8.1
|
||||
c0.6-0.6,1.2-1.1,1.9-1.5c0.7-0.5,1.4-0.9,2.1-1.2c0.7-0.3,1.5-0.6,2.3-0.8s1.6-0.4,2.4-0.4c0.4,0,0.8-0.1,1.2-0.1
|
||||
c0.1,0,0.2,0,0.3,0h0.3h0.2c0.1,0,0,0,0,0h0.1h0.3c0.9,0.1,1.8,0.2,2.6,0.4c1.7,0.4,3.4,1,5,1.9c3.2,1.8,5.9,4.5,7.5,7.8
|
||||
c0.8,1.6,1.4,3.4,1.7,5.3c0.1,0.5,0.1,0.9,0.2,1.4v0.3V66c0,0.1,0,0.2,0,0.3c0,0.1,0,0.2,0,0.3v0.3v0.3c0,0.2,0,0.6,0,0.8
|
||||
c0,0.5-0.1,1-0.1,1.5c-0.1,0.5-0.1,1-0.2,1.5s-0.2,1-0.3,1.5c-0.2,1-0.6,1.9-0.9,2.9c-0.7,1.9-1.7,3.7-2.9,5.3
|
||||
c-2.4,3.3-5.7,6-9.4,7.7c-1.9,0.8-3.8,1.5-5.8,1.8c-1,0.2-2,0.3-3,0.3H81h-0.2h-0.3H80h-0.3c0.1,0,0,0,0,0h-0.1
|
||||
c-0.5,0-1.1,0-1.6-0.1c-2.2-0.2-4.3-0.6-6.4-1.2c-2.1-0.6-4.1-1.4-6-2.4c-3.8-2-7.2-4.9-9.9-8.2c-1.3-1.7-2.5-3.5-3.5-5.4
|
||||
s-1.7-3.9-2.3-5.9c-0.6-2-0.9-4.1-1-6.2v-0.4v-0.1v-0.1v-0.2V60v-0.1v-0.1v-0.2v-0.5V59l0,0v-0.2c0-0.3,0-0.5,0-0.8
|
||||
c0-1,0.1-2.1,0.3-3.2c0.1-1.1,0.3-2.1,0.5-3.2c0.2-1.1,0.5-2.1,0.8-3.2c0.6-2.1,1.3-4.1,2.2-6c1.8-3.8,4.1-7.2,6.8-9.9
|
||||
c0.7-0.7,1.4-1.3,2.2-1.9c0.3-0.3,1-0.9,1.8-1.4c0.8-0.5,1.6-1,2.5-1.4c0.4-0.2,0.8-0.4,1.3-0.6c0.2-0.1,0.4-0.2,0.7-0.3
|
||||
c0.2-0.1,0.4-0.2,0.7-0.3c0.9-0.4,1.8-0.7,2.7-1c0.2-0.1,0.5-0.1,0.7-0.2c0.2-0.1,0.5-0.1,0.7-0.2c0.5-0.1,0.9-0.2,1.4-0.4
|
||||
c0.2-0.1,0.5-0.1,0.7-0.2c0.2,0,0.5-0.1,0.7-0.1c0.2,0,0.5-0.1,0.7-0.1l0.4-0.1l0.4-0.1c0.2,0,0.5-0.1,0.7-0.1
|
||||
c0.3,0,0.5-0.1,0.8-0.1c0.2,0,0.6-0.1,0.8-0.1c0.2,0,0.3,0,0.5-0.1h0.3h0.2h0.2c0.3,0,0.5,0,0.8-0.1h0.4c0,0,0.1,0,0,0h0.1h0.2
|
||||
c0.2,0,0.5,0,0.7,0c0.9,0,1.8,0,2.7,0c1.8,0.1,3.6,0.3,5.3,0.6c3.4,0.6,6.7,1.7,9.6,3.2c2.9,1.4,5.6,3.2,7.8,5.1
|
||||
c0.1,0.1,0.3,0.2,0.4,0.4c0.1,0.1,0.3,0.2,0.4,0.4c0.3,0.2,0.5,0.5,0.8,0.7c0.3,0.2,0.5,0.5,0.8,0.7c0.2,0.3,0.5,0.5,0.7,0.8
|
||||
c1,1,1.9,2.1,2.7,3.1c1.6,2.1,2.9,4.2,3.9,6.2c0.1,0.1,0.1,0.2,0.2,0.4c0.1,0.1,0.1,0.2,0.2,0.4s0.2,0.5,0.4,0.7
|
||||
c0.1,0.2,0.2,0.5,0.3,0.7c0.1,0.2,0.2,0.5,0.3,0.7c0.4,0.9,0.7,1.8,1,2.7c0.5,1.4,0.8,2.6,1.1,3.6c0.1,0.4,0.5,0.7,0.9,0.7
|
||||
c0.5,0,0.8-0.4,0.8-0.9C123,52.7,123,51.4,122.9,49.9z"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 6.6 KiB |
1
frontend/public/Logos/temporal.svg
Normal file
1
frontend/public/Logos/temporal.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="192" height="192" fill="none" viewBox="0 0 192 192"><rect width="192" height="192" fill="url(#paint0_linear_1452_5317)" rx="24"/><path fill="#F2F2F2" d="M123.34 68.6596C119.655 41.0484 110.327 18 96 18C81.6731 18 72.3454 41.0484 68.6596 68.6596C41.0484 72.3454 18 81.6731 18 96C18 110.327 41.0525 119.655 68.6596 123.34C72.3454 150.948 81.6731 174 96 174C110.327 174 119.655 150.948 123.34 123.34C150.952 119.655 174 110.327 174 96C174 81.6731 150.948 72.3454 123.34 68.6596ZM67.7583 115.298C41.3151 111.479 25.893 102.737 25.893 96C25.893 89.2629 41.3151 80.5212 67.7583 76.7021C67.1764 83.0674 66.8733 89.566 66.8733 96C66.8733 102.434 67.1764 108.937 67.7583 115.298ZM96 25.893C102.737 25.893 111.479 41.3151 115.298 67.7583C108.937 67.1764 102.434 66.8733 96 66.8733C89.566 66.8733 83.0633 67.1764 76.7021 67.7583C80.5212 41.3151 89.2629 25.893 96 25.893ZM124.242 115.298C122.94 115.488 117.602 116.114 116.252 116.248C116.118 117.602 115.488 122.936 115.302 124.238C111.483 150.681 102.741 166.103 96.0041 166.103C89.267 166.103 80.5253 150.681 76.7061 124.238C76.5202 122.936 75.8898 117.598 75.7564 116.248C75.1421 109.979 74.7703 103.246 74.7703 96C74.7703 88.7537 75.1421 82.0206 75.7564 75.7483C82.0247 75.134 88.7577 74.7622 96.0041 74.7622C103.25 74.7622 109.983 75.134 116.252 75.7483C117.606 75.8817 122.94 76.5121 124.242 76.698C150.685 80.5172 166.111 89.2629 166.111 95.996C166.111 102.729 150.685 111.479 124.242 115.298Z"/><defs><linearGradient id="paint0_linear_1452_5317" x1="183" x2="0" y1="192" y2="0" gradientUnits="userSpaceOnUse"><stop stop-color="#444CE7"/><stop offset="1" stop-color="#B664FF"/></linearGradient></defs></svg>
|
||||
|
After Width: | Height: | Size: 1.7 KiB |
@@ -60,6 +60,8 @@ function App(): JSX.Element {
|
||||
|
||||
const { isCloudUser, isEnterpriseSelfHostedUser } = useGetTenantLicense();
|
||||
|
||||
const [isSentryInitialized, setIsSentryInitialized] = useState(false);
|
||||
|
||||
const enableAnalytics = useCallback(
|
||||
(user: IUser): void => {
|
||||
// wait for the required data to be loaded before doing init for anything!
|
||||
@@ -293,25 +295,29 @@ function App(): JSX.Element {
|
||||
Userpilot.initialize(process.env.USERPILOT_KEY);
|
||||
}
|
||||
|
||||
Sentry.init({
|
||||
dsn: process.env.SENTRY_DSN,
|
||||
tunnel: process.env.TUNNEL_URL,
|
||||
environment: 'production',
|
||||
integrations: [
|
||||
Sentry.browserTracingIntegration(),
|
||||
Sentry.replayIntegration({
|
||||
maskAllText: false,
|
||||
blockAllMedia: false,
|
||||
}),
|
||||
],
|
||||
// Performance Monitoring
|
||||
tracesSampleRate: 1.0, // Capture 100% of the transactions
|
||||
// Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
|
||||
tracePropagationTargets: [],
|
||||
// Session Replay
|
||||
replaysSessionSampleRate: 0.1, // This sets the sample rate at 10%. You may want to change it to 100% while in development and then sample at a lower rate in production.
|
||||
replaysOnErrorSampleRate: 1.0, // If you're not already sampling the entire session, change the sample rate to 100% when sampling sessions where errors occur.
|
||||
});
|
||||
if (!isSentryInitialized) {
|
||||
Sentry.init({
|
||||
dsn: process.env.SENTRY_DSN,
|
||||
tunnel: process.env.TUNNEL_URL,
|
||||
environment: 'production',
|
||||
integrations: [
|
||||
Sentry.browserTracingIntegration(),
|
||||
Sentry.replayIntegration({
|
||||
maskAllText: false,
|
||||
blockAllMedia: false,
|
||||
}),
|
||||
],
|
||||
// Performance Monitoring
|
||||
tracesSampleRate: 1.0, // Capture 100% of the transactions
|
||||
// Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
|
||||
tracePropagationTargets: [],
|
||||
// Session Replay
|
||||
replaysSessionSampleRate: 0.1, // This sets the sample rate at 10%. You may want to change it to 100% while in development and then sample at a lower rate in production.
|
||||
replaysOnErrorSampleRate: 1.0, // If you're not already sampling the entire session, change the sample rate to 100% when sampling sessions where errors occur.
|
||||
});
|
||||
|
||||
setIsSentryInitialized(true);
|
||||
}
|
||||
} else {
|
||||
posthog.reset();
|
||||
Sentry.close();
|
||||
@@ -320,6 +326,7 @@ function App(): JSX.Element {
|
||||
window.cioanalytics.reset();
|
||||
}
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [isCloudUser, isEnterpriseSelfHostedUser]);
|
||||
|
||||
// if the user is in logged in state
|
||||
|
||||
@@ -54,7 +54,9 @@ export const REACT_QUERY_KEY = {
|
||||
|
||||
// API Monitoring Query Keys
|
||||
GET_DOMAINS_LIST: 'GET_DOMAINS_LIST',
|
||||
GET_DOMAIN_METRICS_DATA: 'GET_DOMAIN_METRICS_DATA',
|
||||
GET_ENDPOINTS_LIST_BY_DOMAIN: 'GET_ENDPOINTS_LIST_BY_DOMAIN',
|
||||
GET_TOP_ERRORS_BY_DOMAIN: 'GET_TOP_ERRORS_BY_DOMAIN',
|
||||
GET_NESTED_ENDPOINTS_LIST: 'GET_NESTED_ENDPOINTS_LIST',
|
||||
GET_ENDPOINT_METRICS_DATA: 'GET_ENDPOINT_METRICS_DATA',
|
||||
GET_ENDPOINT_STATUS_CODE_DATA: 'GET_ENDPOINT_STATUS_CODE_DATA',
|
||||
|
||||
1595
frontend/src/container/ApiMonitoring/APIMonitoringUtils.test.tsx
Normal file
1595
frontend/src/container/ApiMonitoring/APIMonitoringUtils.test.tsx
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,29 +1,17 @@
|
||||
import { LoadingOutlined } from '@ant-design/icons';
|
||||
import { Select, Spin, Table, Typography } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { Select } from 'antd';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import {
|
||||
EndPointsTableRowData,
|
||||
formatEndPointsDataForTable,
|
||||
getEndPointsColumnsConfig,
|
||||
getEndPointsQueryPayload,
|
||||
getAllEndpointsWidgetData,
|
||||
getGroupByFiltersFromGroupByValues,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import GridCard from 'container/GridCardLayout/GridCard';
|
||||
import QueryBuilderSearchV2 from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2';
|
||||
import { useGetAggregateKeys } from 'hooks/queryBuilder/useGetAggregateKeys';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { useQueries } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import ErrorState from './components/ErrorState';
|
||||
import ExpandedRow from './components/ExpandedRow';
|
||||
import { VIEW_TYPES, VIEWS } from './constants';
|
||||
import { SPAN_ATTRIBUTES, VIEWS } from './constants';
|
||||
|
||||
function AllEndPoints({
|
||||
domainName,
|
||||
@@ -31,13 +19,27 @@ function AllEndPoints({
|
||||
setSelectedView,
|
||||
groupBy,
|
||||
setGroupBy,
|
||||
timeRange,
|
||||
initialFilters,
|
||||
setInitialFiltersEndPointStats,
|
||||
}: {
|
||||
domainName: string;
|
||||
setSelectedEndPointName: (name: string) => void;
|
||||
setSelectedView: (tab: VIEWS) => void;
|
||||
groupBy: IBuilderQuery['groupBy'];
|
||||
setGroupBy: (groupBy: IBuilderQuery['groupBy']) => void;
|
||||
timeRange: {
|
||||
startTime: number;
|
||||
endTime: number;
|
||||
};
|
||||
initialFilters: IBuilderQuery['filters'];
|
||||
setInitialFiltersEndPointStats: (filters: IBuilderQuery['filters']) => void;
|
||||
}): JSX.Element {
|
||||
const [groupBySearchValue, setGroupBySearchValue] = useState<string>('');
|
||||
const [allAvailableGroupByOptions, setAllAvailableGroupByOptions] = useState<{
|
||||
[key: string]: any;
|
||||
}>({});
|
||||
|
||||
const {
|
||||
data: groupByFiltersData,
|
||||
isLoading: isLoadingGroupByFilters,
|
||||
@@ -45,7 +47,7 @@ function AllEndPoints({
|
||||
dataSource: DataSource.TRACES,
|
||||
aggregateAttribute: '',
|
||||
aggregateOperator: 'noop',
|
||||
searchText: '',
|
||||
searchText: groupBySearchValue,
|
||||
tagType: '',
|
||||
});
|
||||
|
||||
@@ -53,130 +55,144 @@ function AllEndPoints({
|
||||
{ value: string; label: string }[]
|
||||
>([]);
|
||||
|
||||
const [expandedRowKeys, setExpandedRowKeys] = useState<React.Key[]>([]);
|
||||
|
||||
const handleGroupByChange = useCallback(
|
||||
(value: IBuilderQuery['groupBy']) => {
|
||||
const groupBy = [];
|
||||
const newGroupBy = [];
|
||||
|
||||
for (let index = 0; index < value.length; index++) {
|
||||
const element = (value[index] as unknown) as string;
|
||||
|
||||
const key = groupByFiltersData?.payload?.attributeKeys?.find(
|
||||
(key) => key.key === element,
|
||||
);
|
||||
// Check if the key exists in our cached options first
|
||||
if (allAvailableGroupByOptions[element]) {
|
||||
newGroupBy.push(allAvailableGroupByOptions[element]);
|
||||
} else {
|
||||
// If not found in cache, check the current filtered results
|
||||
const key = groupByFiltersData?.payload?.attributeKeys?.find(
|
||||
(key) => key.key === element,
|
||||
);
|
||||
|
||||
if (key) {
|
||||
groupBy.push(key);
|
||||
if (key) {
|
||||
newGroupBy.push(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
setGroupBy(groupBy);
|
||||
|
||||
setGroupBy(newGroupBy);
|
||||
setGroupBySearchValue('');
|
||||
},
|
||||
[groupByFiltersData, setGroupBy],
|
||||
[groupByFiltersData, setGroupBy, allAvailableGroupByOptions],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (groupByFiltersData?.payload) {
|
||||
// Update dropdown options
|
||||
setGroupByOptions(
|
||||
groupByFiltersData?.payload?.attributeKeys?.map((filter) => ({
|
||||
value: filter.key,
|
||||
label: filter.key,
|
||||
})) || [],
|
||||
);
|
||||
|
||||
// Cache all available options to preserve selected values using functional update
|
||||
// to avoid dependency on allAvailableGroupByOptions
|
||||
setAllAvailableGroupByOptions((prevOptions) => {
|
||||
const newOptions = { ...prevOptions };
|
||||
groupByFiltersData?.payload?.attributeKeys?.forEach((filter) => {
|
||||
newOptions[filter.key] = filter;
|
||||
});
|
||||
return newOptions;
|
||||
});
|
||||
}
|
||||
}, [groupByFiltersData]);
|
||||
}, [groupByFiltersData]); // Only depends on groupByFiltersData now
|
||||
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const queryPayloads = useMemo(
|
||||
() =>
|
||||
getEndPointsQueryPayload(
|
||||
groupBy,
|
||||
domainName,
|
||||
Math.floor(minTime / 1e9),
|
||||
Math.floor(maxTime / 1e9),
|
||||
),
|
||||
[groupBy, domainName, minTime, maxTime],
|
||||
);
|
||||
|
||||
// Since only one query here
|
||||
const endPointsDataQueries = useQueries(
|
||||
queryPayloads.map((payload) => ({
|
||||
queryKey: [
|
||||
REACT_QUERY_KEY.GET_ENDPOINTS_LIST_BY_DOMAIN,
|
||||
payload,
|
||||
ENTITY_VERSION_V4,
|
||||
groupBy,
|
||||
],
|
||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
|
||||
enabled: !!payload,
|
||||
staleTime: 60 * 1000, // 1 minute stale time : optimize this part
|
||||
})),
|
||||
);
|
||||
|
||||
const endPointsDataQuery = endPointsDataQueries[0];
|
||||
const {
|
||||
data: allEndPointsData,
|
||||
isLoading,
|
||||
isRefetching,
|
||||
isError,
|
||||
refetch,
|
||||
} = endPointsDataQuery;
|
||||
|
||||
const endPointsColumnsConfig = useMemo(
|
||||
() => getEndPointsColumnsConfig(groupBy.length > 0, expandedRowKeys),
|
||||
[groupBy.length, expandedRowKeys],
|
||||
);
|
||||
|
||||
const expandedRowRender = (record: EndPointsTableRowData): JSX.Element => (
|
||||
<ExpandedRow
|
||||
domainName={domainName}
|
||||
selectedRowData={record}
|
||||
setSelectedEndPointName={setSelectedEndPointName}
|
||||
setSelectedView={setSelectedView}
|
||||
/>
|
||||
);
|
||||
|
||||
const handleGroupByRowClick = (record: EndPointsTableRowData): void => {
|
||||
if (expandedRowKeys.includes(record.key)) {
|
||||
setExpandedRowKeys(expandedRowKeys.filter((key) => key !== record.key));
|
||||
} else {
|
||||
setExpandedRowKeys((expandedRowKeys) => [...expandedRowKeys, record.key]);
|
||||
// Cache existing selected options on component mount
|
||||
useEffect(() => {
|
||||
if (groupBy && groupBy.length > 0) {
|
||||
setAllAvailableGroupByOptions((prevOptions) => {
|
||||
const newOptions = { ...prevOptions };
|
||||
groupBy.forEach((option) => {
|
||||
newOptions[option.key] = option;
|
||||
});
|
||||
return newOptions;
|
||||
});
|
||||
}
|
||||
};
|
||||
}, [groupBy]); // Removed allAvailableGroupByOptions from dependencies
|
||||
|
||||
const handleRowClick = (record: EndPointsTableRowData): void => {
|
||||
if (groupBy.length === 0) {
|
||||
setSelectedEndPointName(record.endpointName); // this will open up the endpoint details tab
|
||||
setSelectedView(VIEW_TYPES.ENDPOINT_DETAILS);
|
||||
logEvent('API Monitoring: Endpoint name row clicked', {});
|
||||
} else {
|
||||
handleGroupByRowClick(record); // this will prepare the nested query payload
|
||||
}
|
||||
};
|
||||
const currentQuery = initialQueriesMap[DataSource.TRACES];
|
||||
|
||||
const formattedEndPointsData = useMemo(
|
||||
() =>
|
||||
formatEndPointsDataForTable(
|
||||
allEndPointsData?.payload?.data?.result[0]?.table?.rows,
|
||||
groupBy,
|
||||
),
|
||||
[groupBy, allEndPointsData],
|
||||
// Local state for filters, combining endpoint filter and search filters
|
||||
const [filters, setFilters] = useState<IBuilderQuery['filters']>(() => {
|
||||
// Initialize filters based on the initial endPointName prop
|
||||
const initialItems = [...initialFilters.items];
|
||||
return { op: 'AND', items: initialItems };
|
||||
});
|
||||
|
||||
// Handler for changes from the QueryBuilderSearchV2 component
|
||||
const handleFilterChange = useCallback(
|
||||
(newFilters: IBuilderQuery['filters']): void => {
|
||||
// 1. Update local filters state immediately
|
||||
setFilters(newFilters);
|
||||
},
|
||||
[], // Dependencies for the callback
|
||||
);
|
||||
|
||||
if (isError) {
|
||||
return (
|
||||
<div className="all-endpoints-error-state-wrapper">
|
||||
<ErrorState refetch={refetch} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
const updatedCurrentQuery = useMemo(
|
||||
() => ({
|
||||
...currentQuery,
|
||||
builder: {
|
||||
...currentQuery.builder,
|
||||
queryData: [
|
||||
{
|
||||
...currentQuery.builder.queryData[0],
|
||||
dataSource: DataSource.TRACES,
|
||||
filters, // Use the local filters state
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
[filters, currentQuery],
|
||||
);
|
||||
|
||||
const query = updatedCurrentQuery?.builder?.queryData[0] || null;
|
||||
|
||||
const allEndpointsWidgetData = useMemo(
|
||||
() => getAllEndpointsWidgetData(groupBy, domainName, filters),
|
||||
[groupBy, domainName, filters],
|
||||
);
|
||||
|
||||
const onRowClick = useCallback(
|
||||
(props: any): void => {
|
||||
setSelectedEndPointName(props[SPAN_ATTRIBUTES.URL_PATH] as string);
|
||||
setSelectedView(VIEWS.ENDPOINT_STATS);
|
||||
const initialItems = [
|
||||
...filters.items,
|
||||
...getGroupByFiltersFromGroupByValues(props, groupBy).items,
|
||||
];
|
||||
setInitialFiltersEndPointStats({
|
||||
items: initialItems,
|
||||
op: 'AND',
|
||||
});
|
||||
},
|
||||
[
|
||||
filters,
|
||||
setInitialFiltersEndPointStats,
|
||||
setSelectedEndPointName,
|
||||
setSelectedView,
|
||||
groupBy,
|
||||
],
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="all-endpoints-container">
|
||||
<div className="all-endpoints-header">
|
||||
<div className="filter-container">
|
||||
<QueryBuilderSearchV2
|
||||
query={query}
|
||||
onChange={handleFilterChange}
|
||||
placeholder="Search for filters..."
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div className="group-by-container">
|
||||
<div className="group-by-label"> Group by </div>
|
||||
<Select
|
||||
@@ -189,49 +205,17 @@ function AllEndPoints({
|
||||
placeholder="Search for attribute"
|
||||
options={groupByOptions}
|
||||
onChange={handleGroupByChange}
|
||||
onSearch={(value: string): void => setGroupBySearchValue(value)}
|
||||
/>{' '}
|
||||
</div>
|
||||
<div className="endpoints-table-container">
|
||||
<div className="endpoints-table-header">Endpoint overview</div>
|
||||
<Table
|
||||
columns={endPointsColumnsConfig}
|
||||
loading={{
|
||||
spinning: isLoading || isRefetching,
|
||||
indicator: <Spin indicator={<LoadingOutlined size={14} spin />} />,
|
||||
}}
|
||||
dataSource={isLoading || isRefetching ? [] : formattedEndPointsData}
|
||||
locale={{
|
||||
emptyText:
|
||||
isLoading || isRefetching ? null : (
|
||||
<div className="no-filtered-endpoints-message-container">
|
||||
<div className="no-filtered-endpoints-message-content">
|
||||
<img
|
||||
src="/Icons/emptyState.svg"
|
||||
alt="thinking-emoji"
|
||||
className="empty-state-svg"
|
||||
/>
|
||||
|
||||
<Typography.Text className="no-filtered-endpoints-message">
|
||||
This query had no results. Edit your query and try again!
|
||||
</Typography.Text>
|
||||
</div>
|
||||
</div>
|
||||
),
|
||||
}}
|
||||
scroll={{ x: true }}
|
||||
tableLayout="fixed"
|
||||
onRow={(record): { onClick: () => void; className: string } => ({
|
||||
onClick: (): void => handleRowClick(record),
|
||||
className: 'clickable-row',
|
||||
})}
|
||||
expandable={{
|
||||
expandedRowRender: groupBy.length > 0 ? expandedRowRender : undefined,
|
||||
expandedRowKeys,
|
||||
expandIconColumnIndex: -1,
|
||||
}}
|
||||
rowClassName={(_, index): string =>
|
||||
index % 2 === 0 ? 'table-row-dark' : 'table-row-light'
|
||||
}
|
||||
<GridCard
|
||||
widget={allEndpointsWidgetData}
|
||||
isQueryEnabled
|
||||
onDragSelect={(): void => {}}
|
||||
customOnDragSelect={(): void => {}}
|
||||
customTimeRange={timeRange}
|
||||
customOnRowClick={onRowClick}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -12,6 +12,12 @@
|
||||
background: var(--bg-ink-300);
|
||||
box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1);
|
||||
}
|
||||
|
||||
.domain-details-drawer-header-right-container {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 12px;
|
||||
}
|
||||
}
|
||||
|
||||
.domain-detail-drawer {
|
||||
@@ -246,6 +252,9 @@
|
||||
border: 1px solid var(--bg-slate-500);
|
||||
|
||||
.endpoints-table-header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
padding: 12px;
|
||||
color: var(--Vanilla-100, #fff);
|
||||
font-family: Inter;
|
||||
@@ -299,6 +308,7 @@
|
||||
|
||||
.ant-table-tbody > tr:hover > td {
|
||||
background: rgba(255, 255, 255, 0.04);
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.ant-table-cell:first-child {
|
||||
@@ -386,6 +396,21 @@
|
||||
padding-top: 20px;
|
||||
}
|
||||
|
||||
.top-errors-dropdown-container {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
gap: 10px;
|
||||
align-items: center;
|
||||
|
||||
.endpoint-details-filters-container-dropdown {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.endpoint-details-filters-container-search {
|
||||
flex: 1;
|
||||
}
|
||||
}
|
||||
|
||||
.endpoint-details-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
@@ -690,30 +715,140 @@
|
||||
border-radius: 3px;
|
||||
border: 1px solid var(--bg-slate-500);
|
||||
|
||||
.top-services-title {
|
||||
border-bottom: 1px solid var(--bg-slate-500);
|
||||
padding: 10px 12px;
|
||||
border-radius: 3px 3px 0px 0px;
|
||||
background: rgba(171, 189, 255, 0.04);
|
||||
.title-wrapper {
|
||||
display: inline-flex;
|
||||
padding: 1px 2px;
|
||||
align-items: center;
|
||||
border-radius: 2px;
|
||||
background: rgba(113, 144, 249, 0.08);
|
||||
|
||||
.title-wrapper {
|
||||
display: inline-flex;
|
||||
padding: 1px 2px;
|
||||
align-items: center;
|
||||
border-radius: 2px;
|
||||
background: rgba(113, 144, 249, 0.08);
|
||||
|
||||
color: var(--bg-robin-400);
|
||||
font-family: Inter;
|
||||
font-size: 14px;
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
line-height: 18px;
|
||||
letter-spacing: -0.07px;
|
||||
}
|
||||
color: var(--bg-robin-400);
|
||||
font-family: Inter;
|
||||
font-size: 14px;
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
line-height: 18px;
|
||||
letter-spacing: -0.07px;
|
||||
}
|
||||
.dependent-services-container {
|
||||
padding: 10px 12px;
|
||||
border-radius: 3px;
|
||||
border: 1px solid var(--bg-slate-500);
|
||||
.ant-table {
|
||||
.ant-table-thead > tr > th {
|
||||
padding: 12px;
|
||||
font-weight: 500;
|
||||
font-size: 12px;
|
||||
line-height: 18px;
|
||||
border-bottom: none;
|
||||
|
||||
color: var(--text-vanilla-400);
|
||||
font-family: Inter;
|
||||
font-size: 11px;
|
||||
font-style: normal;
|
||||
font-weight: 600;
|
||||
line-height: 18px;
|
||||
/* 163.636% */
|
||||
letter-spacing: 0.44px;
|
||||
text-transform: uppercase;
|
||||
background: none;
|
||||
|
||||
&::before {
|
||||
background-color: transparent;
|
||||
}
|
||||
}
|
||||
|
||||
.ant-table-thead > tr > th:has(.status-code-header) {
|
||||
background: var(--bg-ink-300);
|
||||
opacity: 0.6;
|
||||
}
|
||||
|
||||
.ant-table-cell {
|
||||
padding: 12px;
|
||||
font-size: 13px;
|
||||
line-height: 20px;
|
||||
color: var(--bg-vanilla-100);
|
||||
border-bottom: none;
|
||||
background: var(--bg-ink-400);
|
||||
}
|
||||
|
||||
.ant-table-cell:has(.col-title) {
|
||||
background: rgba(171, 189, 255, 0.04);
|
||||
}
|
||||
|
||||
.ant-table-cell:has(.top-services-item-latency) {
|
||||
text-align: center;
|
||||
opacity: 0.8;
|
||||
background: rgba(171, 189, 255, 0.04);
|
||||
}
|
||||
|
||||
.ant-table-cell:has(.top-services-item-latency-title) {
|
||||
text-align: center;
|
||||
opacity: 0.8;
|
||||
background: rgba(171, 189, 255, 0.04);
|
||||
}
|
||||
|
||||
.ant-table-tbody > tr:hover > td {
|
||||
background: rgba(255, 255, 255, 0.04);
|
||||
}
|
||||
|
||||
.ant-table-cell:first-child {
|
||||
text-align: justify;
|
||||
}
|
||||
|
||||
.ant-table-cell:nth-child(2) {
|
||||
padding-left: 16px;
|
||||
padding-right: 16px;
|
||||
}
|
||||
|
||||
.ant-table-cell:nth-child(n + 3) {
|
||||
padding-right: 24px;
|
||||
}
|
||||
|
||||
.ant-table-tbody > tr > td {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
.ant-table-thead
|
||||
> tr
|
||||
> th:not(:last-child):not(.ant-table-selection-column):not(.ant-table-row-expand-icon-cell):not([colspan])::before {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.ant-empty-normal {
|
||||
visibility: hidden;
|
||||
}
|
||||
|
||||
.table-row-dark {
|
||||
background: var(--bg-ink-300);
|
||||
}
|
||||
|
||||
.ant-table-content {
|
||||
margin-bottom: 0px;
|
||||
}
|
||||
}
|
||||
|
||||
.no-status-code-data-message-container {
|
||||
height: 30vh;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
|
||||
.no-status-code-data-message-content {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: flex-start;
|
||||
justify-content: center;
|
||||
|
||||
width: fit-content;
|
||||
padding: 24px;
|
||||
}
|
||||
|
||||
.no-status-code-data-message {
|
||||
margin-top: 8px;
|
||||
}
|
||||
}
|
||||
|
||||
.top-services-item {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
@@ -743,6 +878,7 @@
|
||||
|
||||
.top-services-item-progress-bar {
|
||||
background-color: var(--bg-slate-400);
|
||||
border-radius: 2px;
|
||||
height: 100%;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
@@ -758,7 +894,7 @@
|
||||
|
||||
.top-services-load-more {
|
||||
border-top: 1px solid var(--bg-slate-500);
|
||||
padding-top: 10px;
|
||||
padding: 10px;
|
||||
|
||||
color: var(--text-vanilla-400);
|
||||
font-family: Inter;
|
||||
|
||||
@@ -3,15 +3,27 @@ import './DomainDetails.styles.scss';
|
||||
import { Color, Spacing } from '@signozhq/design-tokens';
|
||||
import { Button, Divider, Drawer, Radio, Typography } from 'antd';
|
||||
import { RadioChangeEvent } from 'antd/lib';
|
||||
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
|
||||
import {
|
||||
CustomTimeType,
|
||||
Time,
|
||||
} from 'container/TopNav/DateTimeSelectionV2/config';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import GetMinMax from 'lib/getMinMax';
|
||||
import { ArrowDown, ArrowUp, X } from 'lucide-react';
|
||||
import { useState } from 'react';
|
||||
import { useCallback, useMemo, useState } from 'react';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import AllEndPoints from './AllEndPoints';
|
||||
import DomainMetrics from './components/DomainMetrics';
|
||||
import { VIEW_TYPES, VIEWS } from './constants';
|
||||
import EndPointDetailsWrapper from './EndPointDetailsWrapper';
|
||||
import EndPointDetails from './EndPointDetails';
|
||||
import TopErrors from './TopErrors';
|
||||
|
||||
const TimeRangeOffset = 1000000000;
|
||||
|
||||
function DomainDetails({
|
||||
domainData,
|
||||
@@ -33,12 +45,58 @@ function DomainDetails({
|
||||
const [endPointsGroupBy, setEndPointsGroupBy] = useState<
|
||||
IBuilderQuery['groupBy']
|
||||
>([]);
|
||||
const [initialFiltersEndPointStats, setInitialFiltersEndPointStats] = useState<
|
||||
IBuilderQuery['filters']
|
||||
>(domainListFilters);
|
||||
const isDarkMode = useIsDarkMode();
|
||||
|
||||
const handleTabChange = (e: RadioChangeEvent): void => {
|
||||
setSelectedView(e.target.value);
|
||||
};
|
||||
|
||||
const { maxTime, minTime, selectedTime } = useSelector<
|
||||
AppState,
|
||||
GlobalReducer
|
||||
>((state) => state.globalTime);
|
||||
|
||||
const startMs = useMemo(() => Math.floor(Number(minTime) / TimeRangeOffset), [
|
||||
minTime,
|
||||
]);
|
||||
const endMs = useMemo(() => Math.floor(Number(maxTime) / TimeRangeOffset), [
|
||||
maxTime,
|
||||
]);
|
||||
|
||||
const [selectedInterval, setSelectedInterval] = useState<Time>(
|
||||
selectedTime as Time,
|
||||
);
|
||||
|
||||
const [modalTimeRange, setModalTimeRange] = useState(() => ({
|
||||
startTime: startMs,
|
||||
endTime: endMs,
|
||||
}));
|
||||
|
||||
const handleTimeChange = useCallback(
|
||||
(interval: Time | CustomTimeType, dateTimeRange?: [number, number]): void => {
|
||||
setSelectedInterval(interval as Time);
|
||||
|
||||
if (interval === 'custom' && dateTimeRange) {
|
||||
setModalTimeRange({
|
||||
startTime: Math.floor(dateTimeRange[0] / 1000),
|
||||
endTime: Math.floor(dateTimeRange[1] / 1000),
|
||||
});
|
||||
} else {
|
||||
const { maxTime, minTime } = GetMinMax(interval);
|
||||
|
||||
setModalTimeRange({
|
||||
startTime: Math.floor(minTime / TimeRangeOffset),
|
||||
endTime: Math.floor(maxTime / TimeRangeOffset),
|
||||
});
|
||||
}
|
||||
},
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[],
|
||||
);
|
||||
|
||||
return (
|
||||
<Drawer
|
||||
width="60%"
|
||||
@@ -50,32 +108,44 @@ function DomainDetails({
|
||||
{domainData.domainName}
|
||||
</Typography.Text>
|
||||
</div>
|
||||
<Button.Group className="domain-details-drawer-header-ctas">
|
||||
<Button
|
||||
className="domain-navigate-cta"
|
||||
onClick={(): void => {
|
||||
setSelectedDomainIndex(selectedDomainIndex - 1);
|
||||
setSelectedEndPointName('');
|
||||
setEndPointsGroupBy([]);
|
||||
setSelectedView(VIEW_TYPES.ALL_ENDPOINTS);
|
||||
}}
|
||||
icon={<ArrowUp size={16} />}
|
||||
disabled={selectedDomainIndex === 0}
|
||||
title="Previous domain"
|
||||
<div className="domain-details-drawer-header-right-container">
|
||||
<DateTimeSelectionV2
|
||||
showAutoRefresh={false}
|
||||
showRefreshText={false}
|
||||
onTimeChange={handleTimeChange}
|
||||
defaultRelativeTime="5m"
|
||||
isModalTimeSelection
|
||||
modalSelectedInterval={selectedInterval}
|
||||
modalInitialStartTime={modalTimeRange.startTime * 1000}
|
||||
modalInitialEndTime={modalTimeRange.endTime * 1000}
|
||||
/>
|
||||
<Button
|
||||
className="domain-navigate-cta"
|
||||
onClick={(): void => {
|
||||
setSelectedDomainIndex(selectedDomainIndex + 1);
|
||||
setSelectedEndPointName('');
|
||||
setEndPointsGroupBy([]);
|
||||
setSelectedView(VIEW_TYPES.ALL_ENDPOINTS);
|
||||
}}
|
||||
icon={<ArrowDown size={16} />}
|
||||
disabled={selectedDomainIndex === domainListLength - 1}
|
||||
title="Next domain"
|
||||
/>
|
||||
</Button.Group>
|
||||
<Button.Group className="domain-details-drawer-header-ctas">
|
||||
<Button
|
||||
className="domain-navigate-cta"
|
||||
onClick={(): void => {
|
||||
setSelectedDomainIndex(selectedDomainIndex - 1);
|
||||
setSelectedEndPointName('');
|
||||
setEndPointsGroupBy([]);
|
||||
setSelectedView(VIEW_TYPES.ALL_ENDPOINTS);
|
||||
}}
|
||||
icon={<ArrowUp size={16} />}
|
||||
disabled={selectedDomainIndex === 0}
|
||||
title="Previous domain"
|
||||
/>
|
||||
<Button
|
||||
className="domain-navigate-cta"
|
||||
onClick={(): void => {
|
||||
setSelectedDomainIndex(selectedDomainIndex + 1);
|
||||
setSelectedEndPointName('');
|
||||
setEndPointsGroupBy([]);
|
||||
setSelectedView(VIEW_TYPES.ALL_ENDPOINTS);
|
||||
}}
|
||||
icon={<ArrowDown size={16} />}
|
||||
disabled={selectedDomainIndex === domainListLength - 1}
|
||||
title="Next domain"
|
||||
/>
|
||||
</Button.Group>
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
placement="right"
|
||||
@@ -91,7 +161,11 @@ function DomainDetails({
|
||||
>
|
||||
{domainData && (
|
||||
<>
|
||||
<DomainMetrics domainData={domainData} />
|
||||
<DomainMetrics
|
||||
domainName={domainData.domainName}
|
||||
domainListFilters={domainListFilters}
|
||||
timeRange={modalTimeRange}
|
||||
/>
|
||||
<div className="views-tabs-container">
|
||||
<Radio.Group
|
||||
className="views-tabs"
|
||||
@@ -109,13 +183,21 @@ function DomainDetails({
|
||||
</Radio.Button>
|
||||
<Radio.Button
|
||||
className={
|
||||
selectedView === VIEW_TYPES.ENDPOINT_DETAILS
|
||||
selectedView === VIEW_TYPES.ENDPOINT_STATS
|
||||
? 'tab selected_view'
|
||||
: 'tab'
|
||||
}
|
||||
value={VIEW_TYPES.ENDPOINT_DETAILS}
|
||||
value={VIEW_TYPES.ENDPOINT_STATS}
|
||||
>
|
||||
<div className="view-title">Endpoint Details</div>
|
||||
<div className="view-title">Endpoint(s) Stats</div>
|
||||
</Radio.Button>
|
||||
<Radio.Button
|
||||
className={
|
||||
selectedView === VIEW_TYPES.TOP_ERRORS ? 'tab selected_view' : 'tab'
|
||||
}
|
||||
value={VIEW_TYPES.TOP_ERRORS}
|
||||
>
|
||||
<div className="view-title">Top 10 Errors</div>
|
||||
</Radio.Button>
|
||||
</Radio.Group>
|
||||
</div>
|
||||
@@ -126,15 +208,28 @@ function DomainDetails({
|
||||
setSelectedView={setSelectedView}
|
||||
groupBy={endPointsGroupBy}
|
||||
setGroupBy={setEndPointsGroupBy}
|
||||
timeRange={modalTimeRange}
|
||||
initialFilters={domainListFilters}
|
||||
setInitialFiltersEndPointStats={setInitialFiltersEndPointStats}
|
||||
/>
|
||||
)}
|
||||
|
||||
{selectedView === VIEW_TYPES.ENDPOINT_DETAILS && (
|
||||
<EndPointDetailsWrapper
|
||||
{selectedView === VIEW_TYPES.ENDPOINT_STATS && (
|
||||
<EndPointDetails
|
||||
domainName={domainData.domainName}
|
||||
endPointName={selectedEndPointName}
|
||||
setSelectedEndPointName={setSelectedEndPointName}
|
||||
domainListFilters={domainListFilters}
|
||||
initialFilters={initialFiltersEndPointStats}
|
||||
timeRange={modalTimeRange}
|
||||
handleTimeChange={handleTimeChange}
|
||||
/>
|
||||
)}
|
||||
|
||||
{selectedView === VIEW_TYPES.TOP_ERRORS && (
|
||||
<TopErrors
|
||||
domainName={domainData.domainName}
|
||||
timeRange={modalTimeRange}
|
||||
initialFilters={domainListFilters}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
|
||||
@@ -8,16 +8,18 @@ import {
|
||||
getRateOverTimeWidgetData,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import QueryBuilderSearchV2 from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2';
|
||||
import {
|
||||
CustomTimeType,
|
||||
Time,
|
||||
} from 'container/TopNav/DateTimeSelectionV2/config';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { useMemo, useState } from 'react';
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { useQueries } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import DependentServices from './components/DependentServices';
|
||||
import EndPointMetrics from './components/EndPointMetrics';
|
||||
@@ -25,33 +27,107 @@ import EndPointsDropDown from './components/EndPointsDropDown';
|
||||
import MetricOverTimeGraph from './components/MetricOverTimeGraph';
|
||||
import StatusCodeBarCharts from './components/StatusCodeBarCharts';
|
||||
import StatusCodeTable from './components/StatusCodeTable';
|
||||
import { SPAN_ATTRIBUTES } from './constants';
|
||||
|
||||
const httpUrlKey = {
|
||||
dataType: DataTypes.String,
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
type: 'tag',
|
||||
};
|
||||
|
||||
function EndPointDetails({
|
||||
domainName,
|
||||
endPointName,
|
||||
setSelectedEndPointName,
|
||||
domainListFilters,
|
||||
initialFilters,
|
||||
timeRange,
|
||||
handleTimeChange,
|
||||
}: {
|
||||
domainName: string;
|
||||
endPointName: string;
|
||||
setSelectedEndPointName: (value: string) => void;
|
||||
domainListFilters: IBuilderQuery['filters'];
|
||||
initialFilters: IBuilderQuery['filters'];
|
||||
timeRange: {
|
||||
startTime: number;
|
||||
endTime: number;
|
||||
};
|
||||
handleTimeChange: (
|
||||
interval: Time | CustomTimeType,
|
||||
dateTimeRange?: [number, number],
|
||||
) => void;
|
||||
}): JSX.Element {
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
const { startTime: minTime, endTime: maxTime } = timeRange;
|
||||
|
||||
const currentQuery = initialQueriesMap[DataSource.TRACES];
|
||||
|
||||
const [filters, setFilters] = useState<IBuilderQuery['filters']>({
|
||||
op: 'AND',
|
||||
items: [],
|
||||
// Local state for filters, combining endpoint filter and search filters
|
||||
const [filters, setFilters] = useState<IBuilderQuery['filters']>(() => {
|
||||
// Initialize filters based on the initial endPointName prop
|
||||
const initialItems = [...initialFilters.items];
|
||||
if (endPointName) {
|
||||
initialItems.push({
|
||||
id: '92b8a1c1',
|
||||
key: httpUrlKey,
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
});
|
||||
}
|
||||
return { op: 'AND', items: initialItems };
|
||||
});
|
||||
|
||||
// Manually update the query to include the filters
|
||||
// Because using the hook is causing the global domain
|
||||
// query to be updated and causing main domain list to
|
||||
// refetch with the filters of endpoints
|
||||
// Effect to synchronize local filters when the endPointName prop changes (e.g., from dropdown)
|
||||
useEffect(() => {
|
||||
setFilters((currentFilters) => {
|
||||
const existingHttpUrlFilter = currentFilters.items.find(
|
||||
(item) => item.key?.key === httpUrlKey.key,
|
||||
);
|
||||
const existingHttpUrlValue = (existingHttpUrlFilter?.value as string) || '';
|
||||
|
||||
// Only update filters if the prop value is different from what's already in filters
|
||||
if (endPointName === existingHttpUrlValue) {
|
||||
return currentFilters; // No change needed, prevents loop
|
||||
}
|
||||
|
||||
// Rebuild filters: Keep non-http.url filters and add/update http.url filter based on prop
|
||||
const otherFilters = currentFilters.items.filter(
|
||||
(item) => item.key?.key !== httpUrlKey.key,
|
||||
);
|
||||
const newItems = [...otherFilters];
|
||||
if (endPointName) {
|
||||
newItems.push({
|
||||
id: '92b8a1c1',
|
||||
key: httpUrlKey,
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
});
|
||||
}
|
||||
return { op: 'AND', items: newItems };
|
||||
});
|
||||
}, [endPointName]);
|
||||
|
||||
// Handler for changes from the QueryBuilderSearchV2 component
|
||||
const handleFilterChange = useCallback(
|
||||
(newFilters: IBuilderQuery['filters']): void => {
|
||||
// 1. Update local filters state immediately
|
||||
setFilters(newFilters);
|
||||
|
||||
// 2. Derive the endpoint name from the *new* filters state
|
||||
const httpUrlFilter = newFilters.items.find(
|
||||
(item) => item.key?.key === httpUrlKey.key,
|
||||
);
|
||||
const derivedEndPointName = (httpUrlFilter?.value as string) || '';
|
||||
|
||||
// 3. If the derived endpoint name is different from the current prop,
|
||||
// it means the search change modified the effective endpoint.
|
||||
// Notify the parent component.
|
||||
if (derivedEndPointName !== endPointName) {
|
||||
setSelectedEndPointName(derivedEndPointName);
|
||||
}
|
||||
},
|
||||
[endPointName, setSelectedEndPointName], // Dependencies for the callback
|
||||
);
|
||||
|
||||
const updatedCurrentQuery = useMemo(
|
||||
() => ({
|
||||
@@ -62,7 +138,7 @@ function EndPointDetails({
|
||||
{
|
||||
...currentQuery.builder.queryData[0],
|
||||
dataSource: DataSource.TRACES,
|
||||
filters,
|
||||
filters, // Use the local filters state
|
||||
},
|
||||
],
|
||||
},
|
||||
@@ -78,15 +154,8 @@ function EndPointDetails({
|
||||
);
|
||||
|
||||
const endPointDetailsQueryPayload = useMemo(
|
||||
() =>
|
||||
getEndPointDetailsQueryPayload(
|
||||
domainName,
|
||||
endPointName,
|
||||
Math.floor(minTime / 1e9),
|
||||
Math.floor(maxTime / 1e9),
|
||||
filters,
|
||||
),
|
||||
[domainName, endPointName, filters, minTime, maxTime],
|
||||
() => getEndPointDetailsQueryPayload(domainName, minTime, maxTime, filters),
|
||||
[domainName, filters, minTime, maxTime],
|
||||
);
|
||||
|
||||
const endPointDetailsDataQueries = useQueries(
|
||||
@@ -94,7 +163,7 @@ function EndPointDetails({
|
||||
queryKey: [
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY[index],
|
||||
payload,
|
||||
filters.items,
|
||||
filters.items, // Include filters.items in queryKey for better caching
|
||||
ENTITY_VERSION_V4,
|
||||
],
|
||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||
@@ -123,22 +192,30 @@ function EndPointDetails({
|
||||
);
|
||||
|
||||
const { endpoint, port } = useMemo(
|
||||
() => extractPortAndEndpoint(endPointName),
|
||||
() => extractPortAndEndpoint(endPointName), // Derive display info from the prop
|
||||
[endPointName],
|
||||
);
|
||||
|
||||
const [rateOverTimeWidget, latencyOverTimeWidget] = useMemo(
|
||||
() => [
|
||||
getRateOverTimeWidgetData(domainName, endPointName, {
|
||||
items: [...domainListFilters.items, ...filters.items],
|
||||
op: filters.op,
|
||||
}),
|
||||
getLatencyOverTimeWidgetData(domainName, endPointName, {
|
||||
items: [...domainListFilters.items, ...filters.items],
|
||||
op: filters.op,
|
||||
}),
|
||||
getRateOverTimeWidgetData(domainName, endPointName, filters),
|
||||
getLatencyOverTimeWidgetData(domainName, endPointName, filters),
|
||||
],
|
||||
[domainName, endPointName, filters, domainListFilters],
|
||||
[domainName, endPointName, filters], // Use combinedFilters
|
||||
);
|
||||
|
||||
// // [TODO] Fix this later
|
||||
const onDragSelect = useCallback(
|
||||
(start: number, end: number) => {
|
||||
const startTimestamp = Math.trunc(start);
|
||||
const endTimestamp = Math.trunc(end);
|
||||
|
||||
if (startTimestamp !== endTimestamp) {
|
||||
// update the value in local time picker
|
||||
handleTimeChange('custom', [startTimestamp, endTimestamp]);
|
||||
}
|
||||
},
|
||||
[handleTimeChange],
|
||||
);
|
||||
|
||||
return (
|
||||
@@ -156,9 +233,7 @@ function EndPointDetails({
|
||||
<div className="endpoint-details-filters-container-search">
|
||||
<QueryBuilderSearchV2
|
||||
query={query}
|
||||
onChange={(searchFilters): void => {
|
||||
setFilters(searchFilters);
|
||||
}}
|
||||
onChange={handleFilterChange}
|
||||
placeholder="Search for filters..."
|
||||
/>
|
||||
</div>
|
||||
@@ -166,7 +241,9 @@ function EndPointDetails({
|
||||
<div className="endpoint-meta-data">
|
||||
<div className="endpoint-meta-data-pill">
|
||||
<div className="endpoint-meta-data-label">Endpoint</div>
|
||||
<div className="endpoint-meta-data-value">{endpoint || '-'}</div>
|
||||
<div className="endpoint-meta-data-value">
|
||||
{endpoint || 'All Endpoints'}
|
||||
</div>
|
||||
</div>
|
||||
<div className="endpoint-meta-data-pill">
|
||||
<div className="endpoint-meta-data-label">Port</div>
|
||||
@@ -177,6 +254,7 @@ function EndPointDetails({
|
||||
{!isServicesFilterApplied && (
|
||||
<DependentServices
|
||||
dependentServicesQuery={endPointDependentServicesDataQuery}
|
||||
timeRange={timeRange}
|
||||
/>
|
||||
)}
|
||||
<StatusCodeBarCharts
|
||||
@@ -186,12 +264,21 @@ function EndPointDetails({
|
||||
}
|
||||
domainName={domainName}
|
||||
endPointName={endPointName}
|
||||
domainListFilters={domainListFilters}
|
||||
filters={filters}
|
||||
timeRange={timeRange}
|
||||
onDragSelect={onDragSelect}
|
||||
/>
|
||||
<StatusCodeTable endPointStatusCodeDataQuery={endPointStatusCodeDataQuery} />
|
||||
<MetricOverTimeGraph widget={rateOverTimeWidget} />
|
||||
<MetricOverTimeGraph widget={latencyOverTimeWidget} />
|
||||
<MetricOverTimeGraph
|
||||
widget={rateOverTimeWidget}
|
||||
timeRange={timeRange}
|
||||
onDragSelect={onDragSelect}
|
||||
/>
|
||||
<MetricOverTimeGraph
|
||||
widget={latencyOverTimeWidget}
|
||||
timeRange={timeRange}
|
||||
onDragSelect={onDragSelect}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,80 +0,0 @@
|
||||
import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { getEndPointZeroStateQueryPayload } from 'container/ApiMonitoring/utils';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { useMemo } from 'react';
|
||||
import { useQueries } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import EndPointDetailsZeroState from './components/EndPointDetailsZeroState';
|
||||
import EndPointDetails from './EndPointDetails';
|
||||
|
||||
function EndPointDetailsWrapper({
|
||||
domainName,
|
||||
endPointName,
|
||||
setSelectedEndPointName,
|
||||
domainListFilters,
|
||||
}: {
|
||||
domainName: string;
|
||||
endPointName: string;
|
||||
setSelectedEndPointName: (value: string) => void;
|
||||
domainListFilters: IBuilderQuery['filters'];
|
||||
}): JSX.Element {
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const endPointZeroStateQueryPayload = useMemo(
|
||||
() =>
|
||||
getEndPointZeroStateQueryPayload(
|
||||
domainName,
|
||||
Math.floor(minTime / 1e9),
|
||||
Math.floor(maxTime / 1e9),
|
||||
),
|
||||
[domainName, minTime, maxTime],
|
||||
);
|
||||
|
||||
const endPointZeroStateDataQueries = useQueries(
|
||||
endPointZeroStateQueryPayload.map((payload) => ({
|
||||
queryKey: [
|
||||
// Since only one query here
|
||||
REACT_QUERY_KEY.GET_ENDPOINT_DROPDOWN_DATA,
|
||||
payload,
|
||||
ENTITY_VERSION_V4,
|
||||
],
|
||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
|
||||
enabled: !!payload,
|
||||
})),
|
||||
);
|
||||
|
||||
const [endPointZeroStateDataQuery] = useMemo(
|
||||
() => [endPointZeroStateDataQueries[0]],
|
||||
[endPointZeroStateDataQueries],
|
||||
);
|
||||
|
||||
if (endPointName === '') {
|
||||
return (
|
||||
<EndPointDetailsZeroState
|
||||
setSelectedEndPointName={setSelectedEndPointName}
|
||||
endPointDropDownDataQuery={endPointZeroStateDataQuery}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<EndPointDetails
|
||||
domainName={domainName}
|
||||
endPointName={endPointName}
|
||||
setSelectedEndPointName={setSelectedEndPointName}
|
||||
domainListFilters={domainListFilters}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
export default EndPointDetailsWrapper;
|
||||
@@ -0,0 +1,251 @@
|
||||
import { LoadingOutlined } from '@ant-design/icons';
|
||||
import { Spin, Switch, Table, Tooltip, Typography } from 'antd';
|
||||
import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
|
||||
import { DEFAULT_ENTITY_VERSION, ENTITY_VERSION_V4 } from 'constants/app';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import {
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY,
|
||||
formatTopErrorsDataForTable,
|
||||
getEndPointDetailsQueryPayload,
|
||||
getTopErrorsColumnsConfig,
|
||||
getTopErrorsCoRelationQueryFilters,
|
||||
getTopErrorsQueryPayload,
|
||||
TopErrorsResponseRow,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { Info } from 'lucide-react';
|
||||
import { useMemo, useState } from 'react';
|
||||
import { useQueries } from 'react-query';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
import EndPointsDropDown from './components/EndPointsDropDown';
|
||||
import ErrorState from './components/ErrorState';
|
||||
import { SPAN_ATTRIBUTES } from './constants';
|
||||
|
||||
function TopErrors({
|
||||
domainName,
|
||||
timeRange,
|
||||
initialFilters,
|
||||
}: {
|
||||
domainName: string;
|
||||
timeRange: {
|
||||
startTime: number;
|
||||
endTime: number;
|
||||
};
|
||||
initialFilters: IBuilderQuery['filters'];
|
||||
}): JSX.Element {
|
||||
const { startTime: minTime, endTime: maxTime } = timeRange;
|
||||
|
||||
const [endPointName, setSelectedEndPointName] = useState<string>('');
|
||||
const [showStatusCodeErrors, setShowStatusCodeErrors] = useState<boolean>(
|
||||
true,
|
||||
);
|
||||
|
||||
const queryPayloads = useMemo(
|
||||
() =>
|
||||
getTopErrorsQueryPayload(
|
||||
domainName,
|
||||
minTime,
|
||||
maxTime,
|
||||
{
|
||||
items: endPointName
|
||||
? [
|
||||
{
|
||||
id: '92b8a1c1',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
},
|
||||
...initialFilters.items,
|
||||
]
|
||||
: [...initialFilters.items],
|
||||
op: 'AND',
|
||||
},
|
||||
showStatusCodeErrors,
|
||||
),
|
||||
[
|
||||
domainName,
|
||||
endPointName,
|
||||
minTime,
|
||||
maxTime,
|
||||
initialFilters,
|
||||
showStatusCodeErrors,
|
||||
],
|
||||
);
|
||||
|
||||
const topErrorsDataQueries = useQueries(
|
||||
queryPayloads.map((payload) => ({
|
||||
queryKey: [
|
||||
REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
|
||||
payload,
|
||||
DEFAULT_ENTITY_VERSION,
|
||||
showStatusCodeErrors,
|
||||
],
|
||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||
GetMetricQueryRange(payload, DEFAULT_ENTITY_VERSION),
|
||||
enabled: !!payload,
|
||||
staleTime: 0,
|
||||
cacheTime: 0,
|
||||
})),
|
||||
);
|
||||
|
||||
const topErrorsDataQuery = topErrorsDataQueries[0];
|
||||
const {
|
||||
data: topErrorsData,
|
||||
isLoading,
|
||||
isRefetching,
|
||||
isError,
|
||||
refetch,
|
||||
} = topErrorsDataQuery;
|
||||
|
||||
const topErrorsColumnsConfig = useMemo(() => getTopErrorsColumnsConfig(), []);
|
||||
|
||||
const formattedTopErrorsData = useMemo(
|
||||
() =>
|
||||
formatTopErrorsDataForTable(
|
||||
topErrorsData?.payload?.data?.result as TopErrorsResponseRow[],
|
||||
),
|
||||
[topErrorsData],
|
||||
);
|
||||
|
||||
const endPointDropDownQueryPayload = useMemo(
|
||||
() => [
|
||||
getEndPointDetailsQueryPayload(domainName, minTime, maxTime, {
|
||||
items: [],
|
||||
op: 'AND',
|
||||
})[2],
|
||||
],
|
||||
[domainName, minTime, maxTime],
|
||||
);
|
||||
|
||||
const endPointDropDownDataQueries = useQueries(
|
||||
endPointDropDownQueryPayload.map((payload) => ({
|
||||
queryKey: [
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY[4],
|
||||
payload,
|
||||
ENTITY_VERSION_V4,
|
||||
],
|
||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
|
||||
enabled: !!payload,
|
||||
staleTime: 60 * 1000,
|
||||
})),
|
||||
);
|
||||
|
||||
const [endPointDropDownDataQuery] = useMemo(
|
||||
() => [endPointDropDownDataQueries[0]],
|
||||
[endPointDropDownDataQueries],
|
||||
);
|
||||
|
||||
const navigateToExplorer = useNavigateToExplorer();
|
||||
|
||||
if (isError) {
|
||||
return (
|
||||
<div className="all-endpoints-error-state-wrapper">
|
||||
<ErrorState refetch={refetch} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="all-endpoints-container">
|
||||
<div className="top-errors-dropdown-container">
|
||||
<div className="endpoint-details-filters-container-dropdown">
|
||||
<EndPointsDropDown
|
||||
selectedEndPointName={endPointName}
|
||||
setSelectedEndPointName={setSelectedEndPointName}
|
||||
endPointDropDownDataQuery={endPointDropDownDataQuery}
|
||||
parentContainerDiv=".endpoint-details-filters-container"
|
||||
/>
|
||||
</div>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: '8px' }}>
|
||||
<Switch
|
||||
checked={showStatusCodeErrors}
|
||||
onChange={setShowStatusCodeErrors}
|
||||
size="small"
|
||||
/>
|
||||
<span style={{ color: 'white', fontSize: '14px' }}>
|
||||
Status Message Exists
|
||||
</span>
|
||||
<Tooltip title="When enabled, shows errors that have a status message. When disabled, shows all errors regardless of status message">
|
||||
<Info size={16} color="white" />
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="endpoints-table-container">
|
||||
<div className="endpoints-table-header">
|
||||
{showStatusCodeErrors ? 'Errors with Status Message' : 'All Errors'}{' '}
|
||||
<Tooltip
|
||||
title={
|
||||
showStatusCodeErrors
|
||||
? 'Shows errors that have a status message'
|
||||
: 'Shows all errors regardless of status message'
|
||||
}
|
||||
>
|
||||
<Info size={16} color="white" />
|
||||
</Tooltip>
|
||||
</div>
|
||||
<Table
|
||||
columns={topErrorsColumnsConfig}
|
||||
loading={{
|
||||
spinning: isLoading || isRefetching,
|
||||
indicator: <Spin indicator={<LoadingOutlined size={14} spin />} />,
|
||||
}}
|
||||
dataSource={isLoading || isRefetching ? [] : formattedTopErrorsData}
|
||||
locale={{
|
||||
emptyText:
|
||||
isLoading || isRefetching ? null : (
|
||||
<div className="no-filtered-endpoints-message-container">
|
||||
<div className="no-filtered-endpoints-message-content">
|
||||
<img
|
||||
src="/Icons/emptyState.svg"
|
||||
alt="thinking-emoji"
|
||||
className="empty-state-svg"
|
||||
/>
|
||||
|
||||
<Typography.Text className="no-filtered-endpoints-message">
|
||||
This query had no results. Edit your query and try again!
|
||||
</Typography.Text>
|
||||
</div>
|
||||
</div>
|
||||
),
|
||||
}}
|
||||
scroll={{ x: true }}
|
||||
tableLayout="fixed"
|
||||
rowClassName={(_, index): string =>
|
||||
index % 2 === 0 ? 'table-row-dark' : 'table-row-light'
|
||||
}
|
||||
onRow={(record): { onClick: () => void } => ({
|
||||
onClick: (): void => {
|
||||
const filters = getTopErrorsCoRelationQueryFilters(
|
||||
domainName,
|
||||
record.endpointName,
|
||||
record.statusCode,
|
||||
);
|
||||
navigateToExplorer({
|
||||
filters: [...filters.items],
|
||||
dataSource: DataSource.TRACES,
|
||||
startTime: minTime,
|
||||
endTime: maxTime,
|
||||
shouldResolveQuery: true,
|
||||
});
|
||||
},
|
||||
})}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default TopErrors;
|
||||
@@ -1,6 +1,13 @@
|
||||
import { Typography } from 'antd';
|
||||
import '../DomainDetails.styles.scss';
|
||||
|
||||
import { Table, TablePaginationConfig, Typography } from 'antd';
|
||||
import Skeleton from 'antd/lib/skeleton';
|
||||
import { getFormattedDependentServicesData } from 'container/ApiMonitoring/utils';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import {
|
||||
dependentServicesColumns,
|
||||
DependentServicesData,
|
||||
getFormattedDependentServicesData,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import { UnfoldVertical } from 'lucide-react';
|
||||
import { useMemo, useState } from 'react';
|
||||
import { UseQueryResult } from 'react-query';
|
||||
@@ -10,10 +17,15 @@ import ErrorState from './ErrorState';
|
||||
|
||||
interface DependentServicesProps {
|
||||
dependentServicesQuery: UseQueryResult<SuccessResponse<any>, unknown>;
|
||||
timeRange: {
|
||||
startTime: number;
|
||||
endTime: number;
|
||||
};
|
||||
}
|
||||
|
||||
function DependentServices({
|
||||
dependentServicesQuery,
|
||||
timeRange,
|
||||
}: DependentServicesProps): JSX.Element {
|
||||
const {
|
||||
data,
|
||||
@@ -23,19 +35,25 @@ function DependentServices({
|
||||
isRefetching,
|
||||
} = dependentServicesQuery;
|
||||
|
||||
const [currentRenderCount, setCurrentRenderCount] = useState(0);
|
||||
const [isExpanded, setIsExpanded] = useState<boolean>(false);
|
||||
|
||||
const dependentServicesData = useMemo(() => {
|
||||
const formattedDependentServicesData = getFormattedDependentServicesData(
|
||||
data?.payload?.data?.result[0].table.rows,
|
||||
);
|
||||
setCurrentRenderCount(Math.min(formattedDependentServicesData.length, 5));
|
||||
return formattedDependentServicesData;
|
||||
}, [data]);
|
||||
const handleShowMoreClick = (): void => {
|
||||
setIsExpanded((prev) => !prev);
|
||||
};
|
||||
|
||||
const renderItems = useMemo(
|
||||
() => dependentServicesData.slice(0, currentRenderCount),
|
||||
[currentRenderCount, dependentServicesData],
|
||||
const dependentServicesData = useMemo(
|
||||
(): DependentServicesData[] =>
|
||||
getFormattedDependentServicesData(data?.payload?.data?.result[0].table.rows),
|
||||
[data],
|
||||
);
|
||||
|
||||
const paginationConfig = useMemo(
|
||||
(): TablePaginationConfig => ({
|
||||
pageSize: isExpanded ? dependentServicesData.length : 5,
|
||||
hideOnSinglePage: true,
|
||||
position: ['none', 'none'],
|
||||
}),
|
||||
[isExpanded, dependentServicesData.length],
|
||||
);
|
||||
|
||||
if (isLoading || isRefetching) {
|
||||
@@ -48,56 +66,66 @@ function DependentServices({
|
||||
|
||||
return (
|
||||
<div className="top-services-content">
|
||||
<div className="top-services-title">
|
||||
<span className="title-wrapper">Dependent Services</span>
|
||||
</div>
|
||||
<div className="dependent-services-container">
|
||||
{renderItems.length === 0 ? (
|
||||
<div className="no-dependent-services-message-container">
|
||||
<div className="no-dependent-services-message-content">
|
||||
<img
|
||||
src="/Icons/emptyState.svg"
|
||||
alt="thinking-emoji"
|
||||
className="empty-state-svg"
|
||||
/>
|
||||
<Table
|
||||
loading={isLoading || isRefetching}
|
||||
dataSource={dependentServicesData || []}
|
||||
columns={dependentServicesColumns}
|
||||
rowClassName="table-row-dark"
|
||||
pagination={paginationConfig}
|
||||
locale={{
|
||||
emptyText:
|
||||
isLoading || isRefetching ? null : (
|
||||
<div className="no-status-code-data-message-container">
|
||||
<div className="no-status-code-data-message-content">
|
||||
<img
|
||||
src="/Icons/emptyState.svg"
|
||||
alt="thinking-emoji"
|
||||
className="empty-state-svg"
|
||||
/>
|
||||
|
||||
<Typography.Text className="no-dependent-services-message">
|
||||
This query had no results. Edit your query and try again!
|
||||
</Typography.Text>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
renderItems.map((item) => (
|
||||
<div className="top-services-item" key={item.key}>
|
||||
<div className="top-services-item-progress">
|
||||
<div className="top-services-item-key">{item.serviceName}</div>
|
||||
<div className="top-services-item-count">{item.count}</div>
|
||||
<div
|
||||
className="top-services-item-progress-bar"
|
||||
style={{ width: `${item.percentage}%` }}
|
||||
/>
|
||||
</div>
|
||||
<div className="top-services-item-percentage">
|
||||
{item.percentage.toFixed(2)}%
|
||||
</div>
|
||||
</div>
|
||||
))
|
||||
)}
|
||||
<Typography.Text className="no-status-code-data-message">
|
||||
This query had no results. Edit your query and try again!
|
||||
</Typography.Text>
|
||||
</div>
|
||||
</div>
|
||||
),
|
||||
}}
|
||||
onRow={(record): { onClick: () => void; className: string } => ({
|
||||
onClick: (): void => {
|
||||
const url = new URL(
|
||||
`/services/${
|
||||
record.serviceData.serviceName &&
|
||||
record.serviceData.serviceName !== '-'
|
||||
? record.serviceData.serviceName
|
||||
: ''
|
||||
}`,
|
||||
window.location.origin,
|
||||
);
|
||||
const urlQuery = new URLSearchParams();
|
||||
urlQuery.set(QueryParams.startTime, timeRange.startTime.toString());
|
||||
urlQuery.set(QueryParams.endTime, timeRange.endTime.toString());
|
||||
url.search = urlQuery.toString();
|
||||
window.open(url.toString(), '_blank');
|
||||
},
|
||||
className: 'clickable-row',
|
||||
})}
|
||||
/>
|
||||
|
||||
{currentRenderCount < dependentServicesData.length && (
|
||||
{dependentServicesData.length > 5 && (
|
||||
<div
|
||||
className="top-services-load-more"
|
||||
onClick={(): void => setCurrentRenderCount(dependentServicesData.length)}
|
||||
onClick={handleShowMoreClick}
|
||||
onKeyDown={(e): void => {
|
||||
if (e.key === 'Enter') {
|
||||
setCurrentRenderCount(dependentServicesData.length);
|
||||
handleShowMoreClick();
|
||||
}
|
||||
}}
|
||||
role="button"
|
||||
tabIndex={0}
|
||||
>
|
||||
<UnfoldVertical size={14} />
|
||||
Show more...
|
||||
{isExpanded ? 'Show less...' : 'Show more...'}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -1,8 +1,88 @@
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Progress, Tooltip, Typography } from 'antd';
|
||||
import { getLastUsedRelativeTime } from 'container/ApiMonitoring/utils';
|
||||
import { Progress, Skeleton, Tooltip, Typography } from 'antd';
|
||||
import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import {
|
||||
DomainMetricsResponseRow,
|
||||
formatDomainMetricsDataForTable,
|
||||
getDomainMetricsQueryPayload,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { useMemo } from 'react';
|
||||
import { useQueries } from 'react-query';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import ErrorState from './ErrorState';
|
||||
|
||||
function DomainMetrics({
|
||||
domainName,
|
||||
timeRange,
|
||||
domainListFilters,
|
||||
}: {
|
||||
domainName: string;
|
||||
timeRange: { startTime: number; endTime: number };
|
||||
domainListFilters: IBuilderQuery['filters'];
|
||||
}): JSX.Element {
|
||||
const { startTime: minTime, endTime: maxTime } = timeRange;
|
||||
|
||||
const queryPayloads = useMemo(
|
||||
() =>
|
||||
getDomainMetricsQueryPayload(
|
||||
domainName,
|
||||
minTime,
|
||||
maxTime,
|
||||
domainListFilters,
|
||||
),
|
||||
[domainName, minTime, maxTime, domainListFilters],
|
||||
);
|
||||
|
||||
// Since only one query here
|
||||
const domainMetricsDataQueries = useQueries(
|
||||
queryPayloads.map((payload) => ({
|
||||
queryKey: [
|
||||
REACT_QUERY_KEY.GET_DOMAIN_METRICS_DATA,
|
||||
payload,
|
||||
ENTITY_VERSION_V4,
|
||||
],
|
||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
|
||||
enabled: !!payload,
|
||||
staleTime: 60 * 1000, // 1 minute stale time : optimize this part
|
||||
})),
|
||||
);
|
||||
|
||||
const domainMetricsDataQuery = domainMetricsDataQueries[0];
|
||||
// [TODO] handle the case where the data is not available
|
||||
// [TODO] Format the data properly
|
||||
const {
|
||||
data: domainMetricsData,
|
||||
isLoading,
|
||||
isRefetching,
|
||||
isError,
|
||||
refetch,
|
||||
} = domainMetricsDataQuery;
|
||||
|
||||
// [TODO] Fix type error
|
||||
const formattedDomainMetricsData = useMemo(() => {
|
||||
// Safely access the data with proper type checking
|
||||
const rowData = domainMetricsData?.payload?.data?.result[0]?.table?.rows[0];
|
||||
|
||||
// Only pass the data if it matches the expected format
|
||||
return formatDomainMetricsDataForTable(
|
||||
rowData as DomainMetricsResponseRow | undefined,
|
||||
);
|
||||
}, [domainMetricsData]);
|
||||
|
||||
if (isError) {
|
||||
return (
|
||||
<div className="all-endpoints-error-state-wrapper">
|
||||
<ErrorState refetch={refetch} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function DomainMetrics({ domainData }: { domainData: any }): JSX.Element {
|
||||
return (
|
||||
<div className="domain-detail-drawer__endpoint">
|
||||
<div className="domain-details-grid">
|
||||
@@ -23,7 +103,7 @@ function DomainMetrics({ domainData }: { domainData: any }): JSX.Element {
|
||||
type="secondary"
|
||||
className="domain-details-metadata-label"
|
||||
>
|
||||
ERROR RATE
|
||||
ERROR %
|
||||
</Typography.Text>
|
||||
<Typography.Text
|
||||
type="secondary"
|
||||
@@ -35,43 +115,62 @@ function DomainMetrics({ domainData }: { domainData: any }): JSX.Element {
|
||||
|
||||
<div className="values-row">
|
||||
<Typography.Text className="domain-details-metadata-value">
|
||||
<Tooltip title={domainData.endpointCount}>
|
||||
<span className="round-metric-tag">{domainData.endpointCount}</span>
|
||||
</Tooltip>
|
||||
{isLoading || isRefetching ? (
|
||||
<Skeleton.Button active size="small" />
|
||||
) : (
|
||||
<Tooltip title={formattedDomainMetricsData.endpointCount}>
|
||||
<span className="round-metric-tag">
|
||||
{formattedDomainMetricsData.endpointCount}
|
||||
</span>
|
||||
</Tooltip>
|
||||
)}
|
||||
</Typography.Text>
|
||||
{/* // update the tooltip as well */}
|
||||
<Typography.Text className="domain-details-metadata-value">
|
||||
<Tooltip title={domainData.latency}>
|
||||
<span className="round-metric-tag">
|
||||
{(domainData.latency / 1000).toFixed(3)}s
|
||||
</span>
|
||||
</Tooltip>
|
||||
{isLoading || isRefetching ? (
|
||||
<Skeleton.Button active size="small" />
|
||||
) : (
|
||||
<Tooltip title={formattedDomainMetricsData.latency}>
|
||||
<span className="round-metric-tag">
|
||||
{(Number(formattedDomainMetricsData.latency) / 1000).toFixed(3)}s
|
||||
</span>
|
||||
</Tooltip>
|
||||
)}
|
||||
</Typography.Text>
|
||||
{/* // update the tooltip as well */}
|
||||
<Typography.Text className="domain-details-metadata-value error-rate">
|
||||
<Tooltip title={domainData.errorRate}>
|
||||
<Progress
|
||||
status="active"
|
||||
percent={Number((domainData.errorRate * 100).toFixed(1))}
|
||||
strokeLinecap="butt"
|
||||
size="small"
|
||||
strokeColor={((): string => {
|
||||
const errorRatePercent = Number(
|
||||
(domainData.errorRate * 100).toFixed(1),
|
||||
);
|
||||
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
||||
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
||||
return Color.BG_FOREST_500;
|
||||
})()}
|
||||
className="progress-bar"
|
||||
/>
|
||||
</Tooltip>
|
||||
{isLoading || isRefetching ? (
|
||||
<Skeleton.Button active size="small" />
|
||||
) : (
|
||||
<Tooltip title={formattedDomainMetricsData.errorRate}>
|
||||
<Progress
|
||||
status="active"
|
||||
percent={Number(
|
||||
Number(formattedDomainMetricsData.errorRate).toFixed(2),
|
||||
)}
|
||||
strokeLinecap="butt"
|
||||
size="small"
|
||||
strokeColor={((): string => {
|
||||
const errorRatePercent = Number(
|
||||
Number(formattedDomainMetricsData.errorRate).toFixed(2),
|
||||
);
|
||||
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
||||
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
||||
return Color.BG_FOREST_500;
|
||||
})()}
|
||||
className="progress-bar"
|
||||
/>
|
||||
</Tooltip>
|
||||
)}
|
||||
</Typography.Text>
|
||||
{/* // update the tooltip as well */}
|
||||
<Typography.Text className="domain-details-metadata-value">
|
||||
<Tooltip title={domainData.lastUsed}>
|
||||
{getLastUsedRelativeTime(domainData.lastUsed)}
|
||||
</Tooltip>
|
||||
{isLoading || isRefetching ? (
|
||||
<Skeleton.Button active size="small" />
|
||||
) : (
|
||||
<Tooltip title={formattedDomainMetricsData.lastUsed}>
|
||||
{formattedDomainMetricsData.lastUsed}
|
||||
</Tooltip>
|
||||
)}
|
||||
</Typography.Text>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -54,7 +54,7 @@ function EndPointMetrics({
|
||||
type="secondary"
|
||||
className="domain-details-metadata-label"
|
||||
>
|
||||
ERROR RATE
|
||||
ERROR %
|
||||
</Typography.Text>
|
||||
<Typography.Text
|
||||
type="secondary"
|
||||
@@ -89,12 +89,13 @@ function EndPointMetrics({
|
||||
) : (
|
||||
<Tooltip title={metricsData?.errorRate}>
|
||||
<Progress
|
||||
percent={Number((metricsData?.errorRate ?? 0 * 100).toFixed(1))}
|
||||
status="active"
|
||||
percent={Number(Number(metricsData?.errorRate ?? 0).toFixed(2))}
|
||||
strokeLinecap="butt"
|
||||
size="small"
|
||||
strokeColor={((): string => {
|
||||
const errorRatePercent = Number(
|
||||
(metricsData?.errorRate ?? 0 * 100).toFixed(1),
|
||||
Number(metricsData?.errorRate ?? 0).toFixed(2),
|
||||
);
|
||||
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
||||
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
||||
|
||||
@@ -52,6 +52,10 @@ function EndPointsDropDown({
|
||||
: (triggerNode): HTMLElement => triggerNode.parentNode as HTMLElement
|
||||
}
|
||||
dropdownStyle={dropdownStyle}
|
||||
allowClear
|
||||
onClear={(): void => {
|
||||
setSelectedEndPointName('');
|
||||
}}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -19,6 +19,7 @@ import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { OrderByPayload } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import { VIEW_TYPES, VIEWS } from '../constants';
|
||||
@@ -28,11 +29,13 @@ function ExpandedRow({
|
||||
selectedRowData,
|
||||
setSelectedEndPointName,
|
||||
setSelectedView,
|
||||
orderBy,
|
||||
}: {
|
||||
domainName: string;
|
||||
selectedRowData: EndPointsTableRowData;
|
||||
setSelectedEndPointName: (name: string) => void;
|
||||
setSelectedView: (view: VIEWS) => void;
|
||||
orderBy: OrderByPayload | null;
|
||||
}): JSX.Element {
|
||||
const nestedColumns = useMemo(() => getEndPointsColumnsConfig(false, []), []);
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
@@ -100,6 +103,7 @@ function ExpandedRow({
|
||||
? formatEndPointsDataForTable(
|
||||
groupedByRowQuery.data?.payload.data.result[0].table?.rows,
|
||||
[],
|
||||
orderBy,
|
||||
)
|
||||
: []
|
||||
}
|
||||
@@ -114,7 +118,7 @@ function ExpandedRow({
|
||||
onRow={(record): { onClick: () => void; className: string } => ({
|
||||
onClick: (): void => {
|
||||
setSelectedEndPointName(record.endpointName);
|
||||
setSelectedView(VIEW_TYPES.ENDPOINT_DETAILS);
|
||||
setSelectedView(VIEW_TYPES.ENDPOINT_STATS);
|
||||
logEvent('API Monitoring: Endpoint name row clicked', {});
|
||||
},
|
||||
className: 'expanded-clickable-row',
|
||||
|
||||
@@ -2,7 +2,15 @@ import { Card } from 'antd';
|
||||
import GridCard from 'container/GridCardLayout/GridCard';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
|
||||
function MetricOverTimeGraph({ widget }: { widget: Widgets }): JSX.Element {
|
||||
function MetricOverTimeGraph({
|
||||
widget,
|
||||
timeRange,
|
||||
onDragSelect,
|
||||
}: {
|
||||
widget: Widgets;
|
||||
timeRange: { startTime: number; endTime: number };
|
||||
onDragSelect: (start: number, end: number) => void;
|
||||
}): JSX.Element {
|
||||
return (
|
||||
<div>
|
||||
<Card bordered className="endpoint-details-card">
|
||||
@@ -10,8 +18,9 @@ function MetricOverTimeGraph({ widget }: { widget: Widgets }): JSX.Element {
|
||||
<GridCard
|
||||
widget={widget}
|
||||
isQueryEnabled
|
||||
onDragSelect={(): void => {}}
|
||||
onDragSelect={onDragSelect}
|
||||
customOnDragSelect={(): void => {}}
|
||||
customTimeRange={timeRange}
|
||||
/>
|
||||
</div>
|
||||
</Card>
|
||||
|
||||
@@ -21,12 +21,9 @@ import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
|
||||
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
|
||||
import { useCallback, useMemo, useRef, useState } from 'react';
|
||||
import { UseQueryResult } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { Options } from 'uplot';
|
||||
|
||||
import ErrorState from './ErrorState';
|
||||
@@ -36,8 +33,9 @@ function StatusCodeBarCharts({
|
||||
endPointStatusCodeLatencyBarChartsDataQuery,
|
||||
domainName,
|
||||
endPointName,
|
||||
domainListFilters,
|
||||
filters,
|
||||
timeRange,
|
||||
onDragSelect,
|
||||
}: {
|
||||
endPointStatusCodeBarChartsDataQuery: UseQueryResult<
|
||||
SuccessResponse<any>,
|
||||
@@ -49,8 +47,12 @@ function StatusCodeBarCharts({
|
||||
>;
|
||||
domainName: string;
|
||||
endPointName: string;
|
||||
domainListFilters: IBuilderQuery['filters'];
|
||||
filters: IBuilderQuery['filters'];
|
||||
timeRange: {
|
||||
startTime: number;
|
||||
endTime: number;
|
||||
};
|
||||
onDragSelect: (start: number, end: number) => void;
|
||||
}): JSX.Element {
|
||||
// 0 : Status Code Count
|
||||
// 1 : Status Code Latency
|
||||
@@ -64,9 +66,7 @@ function StatusCodeBarCharts({
|
||||
data: endPointStatusCodeLatencyBarChartsData,
|
||||
} = endPointStatusCodeLatencyBarChartsDataQuery;
|
||||
|
||||
const { minTime, maxTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
const { startTime: minTime, endTime: maxTime } = timeRange;
|
||||
|
||||
const graphRef = useRef<HTMLDivElement>(null);
|
||||
const dimensions = useResizeObserver(graphRef);
|
||||
@@ -115,25 +115,30 @@ function StatusCodeBarCharts({
|
||||
const navigateToExplorerPages = useNavigateToExplorerPages();
|
||||
const { notifications } = useNotifications();
|
||||
|
||||
const { getCustomSeries } = useGetGraphCustomSeries({
|
||||
isDarkMode,
|
||||
drawStyle: 'bars',
|
||||
colorMapping: {
|
||||
const colorMapping = useMemo(
|
||||
() => ({
|
||||
'200-299': Color.BG_FOREST_500,
|
||||
'300-399': Color.BG_AMBER_400,
|
||||
'400-499': Color.BG_CHERRY_500,
|
||||
'500-599': Color.BG_ROBIN_500,
|
||||
Other: Color.BG_SIENNA_500,
|
||||
},
|
||||
}),
|
||||
[],
|
||||
);
|
||||
|
||||
const { getCustomSeries } = useGetGraphCustomSeries({
|
||||
isDarkMode,
|
||||
drawStyle: 'bars',
|
||||
colorMapping,
|
||||
});
|
||||
|
||||
const widget = useMemo<Widgets>(
|
||||
() =>
|
||||
getStatusCodeBarChartWidgetData(domainName, endPointName, {
|
||||
items: [...domainListFilters.items, ...filters.items],
|
||||
items: [...filters.items],
|
||||
op: filters.op,
|
||||
}),
|
||||
[domainName, endPointName, domainListFilters, filters],
|
||||
[domainName, endPointName, filters],
|
||||
);
|
||||
|
||||
const graphClickHandler = useCallback(
|
||||
@@ -182,11 +187,13 @@ function StatusCodeBarCharts({
|
||||
yAxisUnit: statusCodeWidgetInfo[currentWidgetInfoIndex].yAxisUnit,
|
||||
softMax: null,
|
||||
softMin: null,
|
||||
minTimeScale: Math.floor(minTime / 1e9),
|
||||
maxTimeScale: Math.floor(maxTime / 1e9),
|
||||
minTimeScale: minTime,
|
||||
maxTimeScale: maxTime,
|
||||
panelType: PANEL_TYPES.BAR,
|
||||
onClickHandler: graphClickHandler,
|
||||
customSeries: getCustomSeries,
|
||||
onDragSelect,
|
||||
colorMapping,
|
||||
}),
|
||||
[
|
||||
minTime,
|
||||
@@ -198,6 +205,8 @@ function StatusCodeBarCharts({
|
||||
isDarkMode,
|
||||
graphClickHandler,
|
||||
getCustomSeries,
|
||||
onDragSelect,
|
||||
colorMapping,
|
||||
],
|
||||
);
|
||||
|
||||
|
||||
@@ -1,9 +1,20 @@
|
||||
export enum VIEWS {
|
||||
ALL_ENDPOINTS = 'all_endpoints',
|
||||
ENDPOINT_DETAILS = 'endpoint_details',
|
||||
ENDPOINT_STATS = 'endpoint_stats',
|
||||
TOP_ERRORS = 'top_errors',
|
||||
}
|
||||
|
||||
export const VIEW_TYPES = {
|
||||
ALL_ENDPOINTS: VIEWS.ALL_ENDPOINTS,
|
||||
ENDPOINT_DETAILS: VIEWS.ENDPOINT_DETAILS,
|
||||
ENDPOINT_STATS: VIEWS.ENDPOINT_STATS,
|
||||
TOP_ERRORS: VIEWS.TOP_ERRORS,
|
||||
};
|
||||
|
||||
// Span attribute keys - these are the source of truth for all attribute keys
|
||||
export const SPAN_ATTRIBUTES = {
|
||||
URL_PATH: 'http.url',
|
||||
STATUS_CODE: 'status_code',
|
||||
RESPONSE_STATUS_CODE: 'response_status_code',
|
||||
SERVER_NAME: 'net.peer.name',
|
||||
SERVER_PORT: 'net.peer.port',
|
||||
} as const;
|
||||
|
||||
@@ -7,16 +7,22 @@ import logEvent from 'api/common/logEvent';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import cx from 'classnames';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import RightToolbarActions from 'container/QueryBuilder/components/ToolbarActions/RightToolbarActions';
|
||||
import QueryBuilderSearchV2 from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2';
|
||||
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
|
||||
import { useMemo, useState } from 'react';
|
||||
import Toolbar from 'container/Toolbar/Toolbar';
|
||||
import { useGetCompositeQueryParam } from 'hooks/queryBuilder/useGetCompositeQueryParam';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
|
||||
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
|
||||
import { useCallback, useMemo, useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { HandleChangeQueryData } from 'types/common/operations.types';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import {
|
||||
@@ -26,20 +32,50 @@ import {
|
||||
} from '../../utils';
|
||||
import DomainDetails from './DomainDetails/DomainDetails';
|
||||
|
||||
function DomainList({
|
||||
query,
|
||||
showIP,
|
||||
handleChangeQueryData,
|
||||
}: {
|
||||
query: IBuilderQuery;
|
||||
showIP: boolean;
|
||||
handleChangeQueryData: HandleChangeQueryData;
|
||||
}): JSX.Element {
|
||||
function DomainList({ showIP }: { showIP: boolean }): JSX.Element {
|
||||
const [selectedDomainIndex, setSelectedDomainIndex] = useState<number>(-1);
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const { currentQuery, handleRunQuery } = useQueryBuilder();
|
||||
const query = useMemo(() => currentQuery?.builder?.queryData[0] || null, [
|
||||
currentQuery,
|
||||
]);
|
||||
|
||||
const { handleChangeQueryData } = useQueryOperations({
|
||||
index: 0,
|
||||
query,
|
||||
entityVersion: '',
|
||||
});
|
||||
|
||||
// initialise tab with default query.
|
||||
useShareBuilderUrl({
|
||||
...initialQueriesMap.traces,
|
||||
builder: {
|
||||
...initialQueriesMap.traces.builder,
|
||||
queryData: [
|
||||
{
|
||||
...initialQueriesMap.traces.builder.queryData[0],
|
||||
dataSource: DataSource.TRACES,
|
||||
aggregateOperator: 'noop',
|
||||
aggregateAttribute: {
|
||||
...initialQueriesMap.traces.builder.queryData[0].aggregateAttribute,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
|
||||
const compositeData = useGetCompositeQueryParam();
|
||||
|
||||
const handleChangeTagFilters = useCallback(
|
||||
(value: IBuilderQuery['filters']) => {
|
||||
handleChangeQueryData('filters', value);
|
||||
},
|
||||
[handleChangeQueryData],
|
||||
);
|
||||
|
||||
const fetchApiOverview = async (): Promise<
|
||||
SuccessResponse<any> | ErrorResponse
|
||||
> => {
|
||||
@@ -49,7 +85,21 @@ function DomainList({
|
||||
show_ip: showIP,
|
||||
filters: {
|
||||
op: 'AND',
|
||||
items: query?.filters.items,
|
||||
items: [
|
||||
{
|
||||
id: '212678b9',
|
||||
key: {
|
||||
key: 'kind_string',
|
||||
dataType: 'string',
|
||||
type: '',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
},
|
||||
op: '=',
|
||||
value: 'Client',
|
||||
},
|
||||
...(compositeData?.builder?.queryData[0]?.filters.items || []),
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
@@ -70,7 +120,7 @@ function DomainList({
|
||||
};
|
||||
|
||||
const { data, isLoading, isFetching } = useQuery(
|
||||
[REACT_QUERY_KEY.GET_DOMAINS_LIST, minTime, maxTime, query, showIP],
|
||||
[REACT_QUERY_KEY.GET_DOMAINS_LIST, minTime, maxTime, compositeData, showIP],
|
||||
fetchApiOverview,
|
||||
);
|
||||
|
||||
@@ -81,20 +131,18 @@ function DomainList({
|
||||
|
||||
return (
|
||||
<section className={cx('api-module-right-section')}>
|
||||
<Toolbar
|
||||
showAutoRefresh={false}
|
||||
rightActions={<RightToolbarActions onStageRunQuery={handleRunQuery} />}
|
||||
/>
|
||||
{/* add bottom border here */}
|
||||
<div className={cx('api-monitoring-list-header')}>
|
||||
<QueryBuilderSearchV2
|
||||
query={query}
|
||||
onChange={(searchFilters): void =>
|
||||
handleChangeQueryData('filters', searchFilters)
|
||||
}
|
||||
onChange={handleChangeTagFilters}
|
||||
placeholder="Search filters..."
|
||||
hardcodedAttributeKeys={hardcodedAttributeKeys}
|
||||
/>
|
||||
<DateTimeSelectionV2
|
||||
showAutoRefresh={false}
|
||||
showRefreshText={false}
|
||||
hideShareModal
|
||||
/>
|
||||
</div>
|
||||
<Table
|
||||
className={cx('api-monitoring-domain-list-table')}
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
.api-quick-filters-header {
|
||||
padding: 12px;
|
||||
border-bottom: 1px solid var(--bg-slate-400);
|
||||
border-right: 1px solid var(--bg-slate-400);
|
||||
|
||||
display: flex;
|
||||
align-items: center;
|
||||
@@ -24,6 +25,10 @@
|
||||
flex-direction: column;
|
||||
width: 100%;
|
||||
|
||||
.toolbar {
|
||||
border-bottom: 1px solid var(--bg-slate-400);
|
||||
}
|
||||
|
||||
.api-monitoring-list-header {
|
||||
width: 100%;
|
||||
padding: 8px;
|
||||
|
||||
@@ -7,12 +7,8 @@ import logEvent from 'api/common/logEvent';
|
||||
import cx from 'classnames';
|
||||
import QuickFilters from 'components/QuickFilters/QuickFilters';
|
||||
import { QuickFiltersSource } from 'components/QuickFilters/types';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
|
||||
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
|
||||
import { useEffect, useMemo, useState } from 'react';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { useEffect, useState } from 'react';
|
||||
|
||||
import { ApiMonitoringQuickFiltersConfig } from '../utils';
|
||||
import DomainList from './Domains/DomainList';
|
||||
@@ -20,39 +16,10 @@ import DomainList from './Domains/DomainList';
|
||||
function Explorer(): JSX.Element {
|
||||
const [showIP, setShowIP] = useState<boolean>(true);
|
||||
|
||||
const { currentQuery } = useQueryBuilder();
|
||||
|
||||
useEffect(() => {
|
||||
logEvent('API Monitoring: Landing page visited', {});
|
||||
}, []);
|
||||
|
||||
const { handleChangeQueryData } = useQueryOperations({
|
||||
index: 0,
|
||||
query: currentQuery.builder.queryData[0],
|
||||
entityVersion: '',
|
||||
});
|
||||
|
||||
const updatedCurrentQuery = useMemo(
|
||||
() => ({
|
||||
...currentQuery,
|
||||
builder: {
|
||||
...currentQuery.builder,
|
||||
queryData: [
|
||||
{
|
||||
...currentQuery.builder.queryData[0],
|
||||
dataSource: DataSource.TRACES,
|
||||
aggregateOperator: 'noop',
|
||||
aggregateAttribute: {
|
||||
...currentQuery.builder.queryData[0].aggregateAttribute,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
[currentQuery],
|
||||
);
|
||||
const query = updatedCurrentQuery?.builder?.queryData[0] || null;
|
||||
|
||||
return (
|
||||
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
|
||||
<div className={cx('api-monitoring-page', 'filter-visible')}>
|
||||
@@ -83,16 +50,9 @@ function Explorer(): JSX.Element {
|
||||
source={QuickFiltersSource.API_MONITORING}
|
||||
config={ApiMonitoringQuickFiltersConfig}
|
||||
handleFilterVisibilityChange={(): void => {}}
|
||||
onFilterChange={(query: Query): void =>
|
||||
handleChangeQueryData('filters', query.builder.queryData[0].filters)
|
||||
}
|
||||
/>
|
||||
</section>
|
||||
<DomainList
|
||||
query={query}
|
||||
showIP={showIP}
|
||||
handleChangeQueryData={handleChangeQueryData}
|
||||
/>
|
||||
<DomainList showIP={showIP} />
|
||||
</div>
|
||||
</Sentry.ErrorBoundary>
|
||||
);
|
||||
|
||||
@@ -0,0 +1,190 @@
|
||||
import { cleanup, fireEvent, render, screen } from '@testing-library/react';
|
||||
import {
|
||||
getAllEndpointsWidgetData,
|
||||
getGroupByFiltersFromGroupByValues,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import { useGetAggregateKeys } from 'hooks/queryBuilder/useGetAggregateKeys';
|
||||
|
||||
import AllEndPoints from '../Explorer/Domains/DomainDetails/AllEndPoints';
|
||||
import {
|
||||
SPAN_ATTRIBUTES,
|
||||
VIEWS,
|
||||
} from '../Explorer/Domains/DomainDetails/constants';
|
||||
|
||||
// Mock the dependencies
|
||||
jest.mock('container/ApiMonitoring/utils', () => ({
|
||||
getAllEndpointsWidgetData: jest.fn(),
|
||||
getGroupByFiltersFromGroupByValues: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('container/GridCardLayout/GridCard', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockImplementation(({ customOnRowClick }) => (
|
||||
<div data-testid="grid-card-mock">
|
||||
<button
|
||||
type="button"
|
||||
data-testid="row-click-button"
|
||||
onClick={(): void =>
|
||||
customOnRowClick({ [SPAN_ATTRIBUTES.URL_PATH]: '/api/test' })
|
||||
}
|
||||
>
|
||||
Click Row
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
}));
|
||||
|
||||
jest.mock(
|
||||
'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2',
|
||||
() => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockImplementation(({ onChange }) => (
|
||||
<div data-testid="query-builder-mock">
|
||||
<button
|
||||
type="button"
|
||||
data-testid="filter-change-button"
|
||||
onClick={(): void =>
|
||||
onChange({
|
||||
items: [{ id: 'test', key: 'test', op: '=', value: 'test' }],
|
||||
op: 'AND',
|
||||
})
|
||||
}
|
||||
>
|
||||
Change Filter
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
}),
|
||||
);
|
||||
|
||||
jest.mock('hooks/queryBuilder/useGetAggregateKeys', () => ({
|
||||
useGetAggregateKeys: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('antd', () => {
|
||||
const originalModule = jest.requireActual('antd');
|
||||
return {
|
||||
...originalModule,
|
||||
Select: jest.fn().mockImplementation(({ onChange }) => (
|
||||
<div data-testid="select-mock">
|
||||
<button
|
||||
data-testid="select-change-button"
|
||||
type="button"
|
||||
onClick={(): void => onChange(['http.status_code'])}
|
||||
>
|
||||
Change GroupBy
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
};
|
||||
});
|
||||
|
||||
describe('AllEndPoints', () => {
|
||||
const mockProps = {
|
||||
domainName: 'test-domain',
|
||||
setSelectedEndPointName: jest.fn(),
|
||||
setSelectedView: jest.fn(),
|
||||
groupBy: [],
|
||||
setGroupBy: jest.fn(),
|
||||
timeRange: {
|
||||
startTime: 1609459200000,
|
||||
endTime: 1609545600000,
|
||||
},
|
||||
initialFilters: { op: 'AND', items: [] },
|
||||
setInitialFiltersEndPointStats: jest.fn(),
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
|
||||
// Setup mock implementations
|
||||
(useGetAggregateKeys as jest.Mock).mockReturnValue({
|
||||
data: {
|
||||
payload: {
|
||||
attributeKeys: [
|
||||
{
|
||||
key: 'http.status_code',
|
||||
dataType: 'string',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
type: '',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
isLoading: false,
|
||||
});
|
||||
|
||||
(getAllEndpointsWidgetData as jest.Mock).mockReturnValue({
|
||||
id: 'test-widget',
|
||||
title: 'Endpoint Overview',
|
||||
description: 'Endpoint Overview',
|
||||
panelTypes: 'table',
|
||||
queryData: [],
|
||||
});
|
||||
|
||||
(getGroupByFiltersFromGroupByValues as jest.Mock).mockReturnValue({
|
||||
items: [{ id: 'group-filter', key: 'status', op: '=', value: '200' }],
|
||||
op: 'AND',
|
||||
});
|
||||
});
|
||||
|
||||
// Add cleanup after each test
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
});
|
||||
|
||||
it('renders component correctly', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<AllEndPoints {...mockProps} />);
|
||||
|
||||
// Verify basic component rendering
|
||||
expect(screen.getByText('Group by')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('query-builder-mock')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('select-mock')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('grid-card-mock')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('handles filter changes', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<AllEndPoints {...mockProps} />);
|
||||
|
||||
// Trigger filter change
|
||||
fireEvent.click(screen.getByTestId('filter-change-button'));
|
||||
|
||||
// Check if getAllEndpointsWidgetData was called with updated filters
|
||||
expect(getAllEndpointsWidgetData).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
'test-domain',
|
||||
expect.objectContaining({
|
||||
items: expect.arrayContaining([expect.objectContaining({ id: 'test' })]),
|
||||
op: 'AND',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('handles group by changes', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<AllEndPoints {...mockProps} />);
|
||||
|
||||
// Trigger group by change
|
||||
fireEvent.click(screen.getByTestId('select-change-button'));
|
||||
|
||||
// Check if setGroupBy was called with updated group by value
|
||||
expect(mockProps.setGroupBy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('handles row click in grid card', async () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<AllEndPoints {...mockProps} />);
|
||||
|
||||
// Trigger row click
|
||||
fireEvent.click(screen.getByTestId('row-click-button'));
|
||||
|
||||
// Check if proper functions were called
|
||||
expect(mockProps.setSelectedEndPointName).toHaveBeenCalledWith('/api/test');
|
||||
expect(mockProps.setSelectedView).toHaveBeenCalledWith(VIEWS.ENDPOINT_STATS);
|
||||
expect(mockProps.setInitialFiltersEndPointStats).toHaveBeenCalled();
|
||||
expect(getGroupByFiltersFromGroupByValues).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,366 @@
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import { getFormattedDependentServicesData } from 'container/ApiMonitoring/utils';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
|
||||
import DependentServices from '../Explorer/Domains/DomainDetails/components/DependentServices';
|
||||
import ErrorState from '../Explorer/Domains/DomainDetails/components/ErrorState';
|
||||
|
||||
// Create a partial mock of the UseQueryResult interface for testing
|
||||
interface MockQueryResult {
|
||||
isLoading: boolean;
|
||||
isRefetching: boolean;
|
||||
isError: boolean;
|
||||
data?: any;
|
||||
refetch: () => void;
|
||||
}
|
||||
|
||||
// Mock the utility function
|
||||
jest.mock('container/ApiMonitoring/utils', () => ({
|
||||
getFormattedDependentServicesData: jest.fn(),
|
||||
dependentServicesColumns: [
|
||||
{ title: 'Dependent Services', dataIndex: 'serviceData', key: 'serviceData' },
|
||||
{ title: 'AVG. LATENCY', dataIndex: 'latency', key: 'latency' },
|
||||
{ title: 'ERROR %', dataIndex: 'errorPercentage', key: 'errorPercentage' },
|
||||
{ title: 'AVG. RATE', dataIndex: 'rate', key: 'rate' },
|
||||
],
|
||||
}));
|
||||
|
||||
// Mock the ErrorState component
|
||||
jest.mock('../Explorer/Domains/DomainDetails/components/ErrorState', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockImplementation(({ refetch }) => (
|
||||
<div data-testid="error-state-mock">
|
||||
<button type="button" data-testid="refetch-button" onClick={refetch}>
|
||||
Retry
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
}));
|
||||
|
||||
// Mock antd components
|
||||
jest.mock('antd', () => {
|
||||
const originalModule = jest.requireActual('antd');
|
||||
return {
|
||||
...originalModule,
|
||||
Table: jest
|
||||
.fn()
|
||||
.mockImplementation(({ dataSource, loading, pagination, onRow }) => (
|
||||
<div data-testid="table-mock">
|
||||
<div data-testid="loading-state">
|
||||
{loading ? 'Loading' : 'Not Loading'}
|
||||
</div>
|
||||
<div data-testid="row-count">{dataSource?.length || 0}</div>
|
||||
<div data-testid="page-size">{pagination?.pageSize}</div>
|
||||
{dataSource?.map((item: any, index: number) => (
|
||||
<div
|
||||
key={`service-${item.key || index}`}
|
||||
data-testid={`table-row-${index}`}
|
||||
onClick={(): void => onRow?.(item)?.onClick?.()}
|
||||
onKeyDown={(e: React.KeyboardEvent<HTMLDivElement>): void => {
|
||||
if (e.key === 'Enter' || e.key === ' ') {
|
||||
onRow?.(item)?.onClick?.();
|
||||
}
|
||||
}}
|
||||
role="button"
|
||||
tabIndex={0}
|
||||
>
|
||||
{item.serviceData.serviceName}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)),
|
||||
Skeleton: jest
|
||||
.fn()
|
||||
.mockImplementation(() => <div data-testid="skeleton-mock" />),
|
||||
Typography: {
|
||||
Text: jest
|
||||
.fn()
|
||||
.mockImplementation(({ children }) => (
|
||||
<div data-testid="typography-text">{children}</div>
|
||||
)),
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
describe('DependentServices', () => {
|
||||
// Sample mock data to use in tests
|
||||
const mockDependentServicesData = [
|
||||
{
|
||||
key: 'service1',
|
||||
serviceData: {
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
serviceName: 'auth-service',
|
||||
count: 500,
|
||||
percentage: 62.5,
|
||||
},
|
||||
latency: 120,
|
||||
rate: '15',
|
||||
errorPercentage: '2.5',
|
||||
},
|
||||
{
|
||||
key: 'service2',
|
||||
serviceData: {
|
||||
serviceName: 'db-service',
|
||||
count: 300,
|
||||
percentage: 37.5,
|
||||
},
|
||||
latency: 80,
|
||||
rate: '10',
|
||||
errorPercentage: '1.2',
|
||||
},
|
||||
];
|
||||
|
||||
// Default props for tests
|
||||
const mockTimeRange = {
|
||||
startTime: 1609459200000,
|
||||
endTime: 1609545600000,
|
||||
};
|
||||
|
||||
const refetchFn = jest.fn();
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
(getFormattedDependentServicesData as jest.Mock).mockReturnValue(
|
||||
mockDependentServicesData,
|
||||
);
|
||||
});
|
||||
|
||||
it('renders loading state correctly', () => {
|
||||
// Arrange
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: true,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
const { container } = render(
|
||||
<DependentServices
|
||||
dependentServicesQuery={mockQuery as any}
|
||||
timeRange={mockTimeRange}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(container.querySelector('.ant-skeleton')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders error state correctly', () => {
|
||||
// Arrange
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: true,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(
|
||||
<DependentServices
|
||||
dependentServicesQuery={mockQuery as any}
|
||||
timeRange={mockTimeRange}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('error-state-mock')).toBeInTheDocument();
|
||||
expect(ErrorState).toHaveBeenCalledWith(
|
||||
{ refetch: expect.any(Function) },
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('renders data correctly when loaded', () => {
|
||||
// Arrange
|
||||
const mockData = {
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [
|
||||
{
|
||||
data: {
|
||||
'service.name': 'auth-service',
|
||||
A: '500',
|
||||
B: '120000000',
|
||||
C: '15',
|
||||
F1: '2.5',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
} as SuccessResponse<any>;
|
||||
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(
|
||||
<DependentServices
|
||||
dependentServicesQuery={mockQuery as any}
|
||||
timeRange={mockTimeRange}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(getFormattedDependentServicesData).toHaveBeenCalledWith(
|
||||
mockData.payload.data.result[0].table.rows,
|
||||
);
|
||||
|
||||
// Check the table was rendered with the correct data
|
||||
expect(screen.getByTestId('table-mock')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('loading-state')).toHaveTextContent('Not Loading');
|
||||
expect(screen.getByTestId('row-count')).toHaveTextContent('2');
|
||||
|
||||
// Default (collapsed) pagination should be 5
|
||||
expect(screen.getByTestId('page-size')).toHaveTextContent('5');
|
||||
});
|
||||
|
||||
it('handles refetching state correctly', () => {
|
||||
// Arrange
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: true,
|
||||
isError: false,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
const { container } = render(
|
||||
<DependentServices
|
||||
dependentServicesQuery={mockQuery as any}
|
||||
timeRange={mockTimeRange}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(container.querySelector('.ant-skeleton')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('handles row click correctly', () => {
|
||||
// Mock window.open
|
||||
const originalOpen = window.open;
|
||||
window.open = jest.fn();
|
||||
|
||||
// Arrange
|
||||
const mockData = {
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [
|
||||
{
|
||||
data: {
|
||||
'service.name': 'auth-service',
|
||||
A: '500',
|
||||
B: '120000000',
|
||||
C: '15',
|
||||
F1: '2.5',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
} as SuccessResponse<any>;
|
||||
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(
|
||||
<DependentServices
|
||||
dependentServicesQuery={mockQuery as any}
|
||||
timeRange={mockTimeRange}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Click on the first row
|
||||
fireEvent.click(screen.getByTestId('table-row-0'));
|
||||
|
||||
// Assert
|
||||
expect(window.open).toHaveBeenCalledWith(
|
||||
expect.stringContaining('/services/auth-service'),
|
||||
'_blank',
|
||||
);
|
||||
|
||||
// Restore original window.open
|
||||
window.open = originalOpen;
|
||||
});
|
||||
|
||||
it('expands table when showing more', () => {
|
||||
// Set up more than 5 items so the "show more" button appears
|
||||
const moreItems = Array(8)
|
||||
.fill(0)
|
||||
.map((_, index) => ({
|
||||
key: `service${index}`,
|
||||
serviceData: {
|
||||
serviceName: `service-${index}`,
|
||||
count: 100,
|
||||
percentage: 12.5,
|
||||
},
|
||||
latency: 100,
|
||||
rate: '10',
|
||||
errorPercentage: '1',
|
||||
}));
|
||||
|
||||
(getFormattedDependentServicesData as jest.Mock).mockReturnValue(moreItems);
|
||||
|
||||
const mockData = {
|
||||
payload: { data: { result: [{ table: { rows: [] } }] } },
|
||||
} as SuccessResponse<any>;
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Render the component
|
||||
render(
|
||||
<DependentServices
|
||||
dependentServicesQuery={mockQuery as any}
|
||||
timeRange={mockTimeRange}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Find the "Show more" button (using container query since it might not have a testId)
|
||||
const showMoreButton = screen.getByText(/Show more/i);
|
||||
expect(showMoreButton).toBeInTheDocument();
|
||||
|
||||
// Initial page size should be 5
|
||||
expect(screen.getByTestId('page-size')).toHaveTextContent('5');
|
||||
|
||||
// Click the button to expand
|
||||
fireEvent.click(showMoreButton);
|
||||
|
||||
// Page size should now be the full data length
|
||||
expect(screen.getByTestId('page-size')).toHaveTextContent('8');
|
||||
|
||||
// Text should have changed to "Show less"
|
||||
expect(screen.getByText(/Show less/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,386 @@
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import {
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY,
|
||||
extractPortAndEndpoint,
|
||||
getEndPointDetailsQueryPayload,
|
||||
getLatencyOverTimeWidgetData,
|
||||
getRateOverTimeWidgetData,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import {
|
||||
CustomTimeType,
|
||||
Time,
|
||||
} from 'container/TopNav/DateTimeSelectionV2/config';
|
||||
import { useQueries } from 'react-query';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import {
|
||||
TagFilter,
|
||||
TagFilterItem,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
|
||||
import EndPointDetails from '../Explorer/Domains/DomainDetails/EndPointDetails';
|
||||
|
||||
// Mock dependencies
|
||||
jest.mock('react-query', () => ({
|
||||
useQueries: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('container/ApiMonitoring/utils', () => ({
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY: [
|
||||
'endPointMetricsData',
|
||||
'endPointStatusCodeData',
|
||||
'endPointDropDownData',
|
||||
'endPointDependentServicesData',
|
||||
'endPointStatusCodeBarChartsData',
|
||||
'endPointStatusCodeLatencyBarChartsData',
|
||||
],
|
||||
extractPortAndEndpoint: jest.fn(),
|
||||
getEndPointDetailsQueryPayload: jest.fn(),
|
||||
getLatencyOverTimeWidgetData: jest.fn(),
|
||||
getRateOverTimeWidgetData: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock(
|
||||
'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2',
|
||||
() => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockImplementation(({ onChange }) => (
|
||||
<div data-testid="query-builder-search">
|
||||
<button
|
||||
type="button"
|
||||
data-testid="filter-change-button"
|
||||
onClick={(): void =>
|
||||
onChange({
|
||||
items: [
|
||||
{
|
||||
id: 'test-filter',
|
||||
key: {
|
||||
key: 'test.key',
|
||||
dataType: DataTypes.String,
|
||||
type: 'tag',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
},
|
||||
op: '=',
|
||||
value: 'test-value',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
})
|
||||
}
|
||||
>
|
||||
Change Filter
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
}),
|
||||
);
|
||||
|
||||
// Mock all child components to simplify testing
|
||||
jest.mock(
|
||||
'../Explorer/Domains/DomainDetails/components/EndPointMetrics',
|
||||
() => ({
|
||||
__esModule: true,
|
||||
default: jest
|
||||
.fn()
|
||||
.mockImplementation(() => (
|
||||
<div data-testid="endpoint-metrics">EndPoint Metrics</div>
|
||||
)),
|
||||
}),
|
||||
);
|
||||
|
||||
jest.mock(
|
||||
'../Explorer/Domains/DomainDetails/components/EndPointsDropDown',
|
||||
() => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockImplementation(({ setSelectedEndPointName }) => (
|
||||
<div data-testid="endpoints-dropdown">
|
||||
<button
|
||||
type="button"
|
||||
data-testid="select-endpoint-button"
|
||||
onClick={(): void => setSelectedEndPointName('/api/new-endpoint')}
|
||||
>
|
||||
Select Endpoint
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
}),
|
||||
);
|
||||
|
||||
jest.mock(
|
||||
'../Explorer/Domains/DomainDetails/components/DependentServices',
|
||||
() => ({
|
||||
__esModule: true,
|
||||
default: jest
|
||||
.fn()
|
||||
.mockImplementation(() => (
|
||||
<div data-testid="dependent-services">Dependent Services</div>
|
||||
)),
|
||||
}),
|
||||
);
|
||||
|
||||
jest.mock(
|
||||
'../Explorer/Domains/DomainDetails/components/StatusCodeBarCharts',
|
||||
() => ({
|
||||
__esModule: true,
|
||||
default: jest
|
||||
.fn()
|
||||
.mockImplementation(() => (
|
||||
<div data-testid="status-code-bar-charts">Status Code Bar Charts</div>
|
||||
)),
|
||||
}),
|
||||
);
|
||||
|
||||
jest.mock(
|
||||
'../Explorer/Domains/DomainDetails/components/StatusCodeTable',
|
||||
() => ({
|
||||
__esModule: true,
|
||||
default: jest
|
||||
.fn()
|
||||
.mockImplementation(() => (
|
||||
<div data-testid="status-code-table">Status Code Table</div>
|
||||
)),
|
||||
}),
|
||||
);
|
||||
|
||||
jest.mock(
|
||||
'../Explorer/Domains/DomainDetails/components/MetricOverTimeGraph',
|
||||
() => ({
|
||||
__esModule: true,
|
||||
default: jest
|
||||
.fn()
|
||||
.mockImplementation(({ widget }) => (
|
||||
<div data-testid={`metric-graph-${widget.title}`}>{widget.title} Graph</div>
|
||||
)),
|
||||
}),
|
||||
);
|
||||
|
||||
// Test suite for the EndPointDetails container: verifies rendering of all
// sub-components, filter construction, widget-data generation, and query wiring.
// (Reconstructed: stripped interleaved `|`/`||||` corruption artifacts and
// restored indentation; code content unchanged.)
describe('EndPointDetails Component', () => {
	// Six identical query results — one per endpoint-details query payload.
	const mockQueryResults = Array(6).fill({
		data: { data: [] },
		isLoading: false,
		isError: false,
		error: null,
	});

	const mockProps = {
		// eslint-disable-next-line sonarjs/no-duplicate-string
		domainName: 'test-domain',
		endPointName: '/api/test',
		setSelectedEndPointName: jest.fn(),
		initialFilters: { items: [], op: 'AND' } as TagFilter,
		timeRange: {
			startTime: 1609459200000,
			endTime: 1609545600000,
		},
		handleTimeChange: jest.fn() as (
			interval: Time | CustomTimeType,
			dateTimeRange?: [number, number],
		) => void,
	};

	beforeEach(() => {
		jest.clearAllMocks();

		(extractPortAndEndpoint as jest.Mock).mockReturnValue({
			port: '8080',
			endpoint: '/api/test',
		});

		(getEndPointDetailsQueryPayload as jest.Mock).mockReturnValue([
			{ id: 'query1', label: 'Query 1' },
			{ id: 'query2', label: 'Query 2' },
			{ id: 'query3', label: 'Query 3' },
			{ id: 'query4', label: 'Query 4' },
			{ id: 'query5', label: 'Query 5' },
			{ id: 'query6', label: 'Query 6' },
		]);

		(getRateOverTimeWidgetData as jest.Mock).mockReturnValue({
			title: 'Rate Over Time',
			id: 'rate-widget',
		});

		(getLatencyOverTimeWidgetData as jest.Mock).mockReturnValue({
			title: 'Latency Over Time',
			id: 'latency-widget',
		});

		(useQueries as jest.Mock).mockReturnValue(mockQueryResults);
	});

	it('renders the component correctly', () => {
		// eslint-disable-next-line react/jsx-props-no-spreading
		render(<EndPointDetails {...mockProps} />);

		// Check all major components are rendered
		expect(screen.getByTestId('query-builder-search')).toBeInTheDocument();
		expect(screen.getByTestId('endpoints-dropdown')).toBeInTheDocument();
		expect(screen.getByTestId('endpoint-metrics')).toBeInTheDocument();
		expect(screen.getByTestId('dependent-services')).toBeInTheDocument();
		expect(screen.getByTestId('status-code-bar-charts')).toBeInTheDocument();
		expect(screen.getByTestId('status-code-table')).toBeInTheDocument();
		expect(screen.getByTestId('metric-graph-Rate Over Time')).toBeInTheDocument();
		expect(
			screen.getByTestId('metric-graph-Latency Over Time'),
		).toBeInTheDocument();

		// Check endpoint metadata is displayed
		expect(screen.getByText(/8080/i)).toBeInTheDocument();
		expect(screen.getByText('/api/test')).toBeInTheDocument();
	});

	it('calls getEndPointDetailsQueryPayload with correct parameters', () => {
		// eslint-disable-next-line react/jsx-props-no-spreading
		render(<EndPointDetails {...mockProps} />);

		expect(getEndPointDetailsQueryPayload).toHaveBeenCalledWith(
			'test-domain',
			mockProps.timeRange.startTime,
			mockProps.timeRange.endTime,
			expect.objectContaining({
				items: expect.arrayContaining([
					expect.objectContaining({
						key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
						value: '/api/test',
					}),
				]),
				op: 'AND',
			}),
		);
	});

	it('adds endpoint filter to initial filters', () => {
		// eslint-disable-next-line react/jsx-props-no-spreading
		render(<EndPointDetails {...mockProps} />);

		expect(getEndPointDetailsQueryPayload).toHaveBeenCalledWith(
			expect.anything(),
			expect.anything(),
			expect.anything(),
			expect.objectContaining({
				items: expect.arrayContaining([
					expect.objectContaining({
						key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
						value: '/api/test',
					}),
				]),
			}),
		);
	});

	it('updates filters when QueryBuilderSearch changes', () => {
		// eslint-disable-next-line react/jsx-props-no-spreading
		render(<EndPointDetails {...mockProps} />);

		// Trigger filter change
		fireEvent.click(screen.getByTestId('filter-change-button'));

		// Check that filters were updated in subsequent calls to utility functions
		expect(getEndPointDetailsQueryPayload).toHaveBeenCalledTimes(2);
		expect(getEndPointDetailsQueryPayload).toHaveBeenLastCalledWith(
			expect.anything(),
			expect.anything(),
			expect.anything(),
			expect.objectContaining({
				items: expect.arrayContaining([
					expect.objectContaining({
						key: expect.objectContaining({ key: 'test.key' }),
						value: 'test-value',
					}),
				]),
			}),
		);
	});

	it('handles endpoint dropdown selection', () => {
		// eslint-disable-next-line react/jsx-props-no-spreading
		render(<EndPointDetails {...mockProps} />);

		// Trigger endpoint selection
		fireEvent.click(screen.getByTestId('select-endpoint-button'));

		// Check if endpoint was updated
		expect(mockProps.setSelectedEndPointName).toHaveBeenCalledWith(
			'/api/new-endpoint',
		);
	});

	it('does not display dependent services when service filter is applied', () => {
		const propsWithServiceFilter = {
			...mockProps,
			initialFilters: {
				items: [
					{
						id: 'service-filter',
						key: {
							key: 'service.name',
							dataType: DataTypes.String,
							type: 'tag',
							isColumn: false,
							isJSON: false,
						},
						op: '=',
						value: 'test-service',
					},
				] as TagFilterItem[],
				op: 'AND',
			} as TagFilter,
		};

		// eslint-disable-next-line react/jsx-props-no-spreading
		render(<EndPointDetails {...propsWithServiceFilter} />);

		// Dependent services should not be displayed
		expect(screen.queryByTestId('dependent-services')).not.toBeInTheDocument();
	});

	it('passes the correct parameters to widget data generators', () => {
		// eslint-disable-next-line react/jsx-props-no-spreading
		render(<EndPointDetails {...mockProps} />);

		expect(getRateOverTimeWidgetData).toHaveBeenCalledWith(
			'test-domain',
			'/api/test',
			expect.objectContaining({
				items: expect.arrayContaining([
					expect.objectContaining({
						key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
						value: '/api/test',
					}),
				]),
			}),
		);

		expect(getLatencyOverTimeWidgetData).toHaveBeenCalledWith(
			'test-domain',
			'/api/test',
			expect.objectContaining({
				items: expect.arrayContaining([
					expect.objectContaining({
						key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
						value: '/api/test',
					}),
				]),
			}),
		);
	});

	it('generates correct query parameters for useQueries', () => {
		// eslint-disable-next-line react/jsx-props-no-spreading
		render(<EndPointDetails {...mockProps} />);

		// Check if useQueries was called with correct parameters
		expect(useQueries).toHaveBeenCalledWith(
			expect.arrayContaining([
				expect.objectContaining({
					queryKey: expect.arrayContaining([END_POINT_DETAILS_QUERY_KEYS_ARRAY[0]]),
				}),
				expect.objectContaining({
					queryKey: expect.arrayContaining([END_POINT_DETAILS_QUERY_KEYS_ARRAY[1]]),
				}),
				// ... and so on for other queries
			]),
		);
	});
});
|
||||
@@ -0,0 +1,211 @@
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import { getFormattedEndPointMetricsData } from 'container/ApiMonitoring/utils';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
|
||||
import EndPointMetrics from '../Explorer/Domains/DomainDetails/components/EndPointMetrics';
|
||||
import ErrorState from '../Explorer/Domains/DomainDetails/components/ErrorState';
|
||||
|
||||
// Create a partial mock of the UseQueryResult interface for testing.
// Only the fields EndPointMetrics reads are modeled here.
// (Reconstructed: stripped interleaved corruption artifacts; code unchanged.)
interface MockQueryResult {
	isLoading: boolean;
	isRefetching: boolean;
	isError: boolean;
	data?: any;
	refetch: () => void;
}
|
||||
|
||||
// Module mocks for the EndPointMetrics test suite.
// (Reconstructed: stripped interleaved corruption artifacts; code unchanged.)

// Mock the utils function
jest.mock('container/ApiMonitoring/utils', () => ({
	getFormattedEndPointMetricsData: jest.fn(),
}));

// Mock the ErrorState component
jest.mock('../Explorer/Domains/DomainDetails/components/ErrorState', () => ({
	__esModule: true,
	default: jest.fn().mockImplementation(({ refetch }) => (
		<div data-testid="error-state-mock">
			<button type="button" data-testid="refetch-button" onClick={refetch}>
				Retry
			</button>
		</div>
	)),
}));

// Mock antd components
jest.mock('antd', () => {
	const originalModule = jest.requireActual('antd');
	return {
		...originalModule,
		Progress: jest
			.fn()
			.mockImplementation(() => <div data-testid="progress-bar-mock" />),
		Skeleton: {
			Button: jest
				.fn()
				.mockImplementation(() => <div data-testid="skeleton-button-mock" />),
		},
		Tooltip: jest
			.fn()
			.mockImplementation(({ children }) => (
				<div data-testid="tooltip-mock">{children}</div>
			)),
		Typography: {
			Text: jest.fn().mockImplementation(({ children, className }) => (
				<div data-testid={`typography-${className}`} className={className}>
					{children}
				</div>
			)),
		},
	};
});
|
||||
|
||||
// Test suite for EndPointMetrics: loading, error, loaded-data, refetching,
// and null-data rendering states.
// (Reconstructed: stripped interleaved corruption artifacts and restored
// indentation; code content unchanged.)
describe('EndPointMetrics', () => {
	// Common metric data to use in tests
	const mockMetricsData = {
		key: 'test-key',
		rate: '42',
		latency: 99,
		errorRate: 5.5,
		lastUsed: '5 minutes ago',
	};

	// Basic props for tests
	const refetchFn = jest.fn();

	beforeEach(() => {
		jest.clearAllMocks();
		(getFormattedEndPointMetricsData as jest.Mock).mockReturnValue(
			mockMetricsData,
		);
	});

	it('renders loading state correctly', () => {
		const mockQuery: MockQueryResult = {
			isLoading: true,
			isRefetching: false,
			isError: false,
			data: undefined,
			refetch: refetchFn,
		};

		render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);

		// Verify skeleton loaders are visible
		const skeletonElements = screen.getAllByTestId('skeleton-button-mock');
		expect(skeletonElements.length).toBe(4);

		// Verify labels are visible even during loading
		expect(screen.getByText('Rate')).toBeInTheDocument();
		expect(screen.getByText('AVERAGE LATENCY')).toBeInTheDocument();
		expect(screen.getByText('ERROR %')).toBeInTheDocument();
		expect(screen.getByText('LAST USED')).toBeInTheDocument();
	});

	it('renders error state correctly', () => {
		const mockQuery: MockQueryResult = {
			isLoading: false,
			isRefetching: false,
			isError: true,
			data: undefined,
			refetch: refetchFn,
		};

		render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);

		// Verify error state is shown
		expect(screen.getByTestId('error-state-mock')).toBeInTheDocument();
		expect(ErrorState).toHaveBeenCalledWith(
			{ refetch: expect.any(Function) },
			expect.anything(),
		);
	});

	it('renders data correctly when loaded', () => {
		const mockData = {
			payload: {
				data: {
					result: [
						{
							table: {
								rows: [
									{ data: { A: '42', B: '99000000', D: '1609459200000000', F1: '5.5' } },
								],
							},
						},
					],
				},
			},
		} as SuccessResponse<any>;

		const mockQuery: MockQueryResult = {
			isLoading: false,
			isRefetching: false,
			isError: false,
			data: mockData,
			refetch: refetchFn,
		};

		render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);

		// Verify the utils function was called with the data
		expect(getFormattedEndPointMetricsData).toHaveBeenCalledWith(
			mockData.payload.data.result[0].table.rows,
		);

		// Verify data is displayed
		expect(
			screen.getByText(`${mockMetricsData.rate} ops/sec`),
		).toBeInTheDocument();
		expect(screen.getByText(`${mockMetricsData.latency}ms`)).toBeInTheDocument();
		expect(screen.getByText(mockMetricsData.lastUsed)).toBeInTheDocument();
		expect(screen.getByTestId('progress-bar-mock')).toBeInTheDocument(); // For error rate
	});

	it('handles refetching state correctly', () => {
		const mockQuery: MockQueryResult = {
			isLoading: false,
			isRefetching: true,
			isError: false,
			data: undefined,
			refetch: refetchFn,
		};

		render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);

		// Verify skeleton loaders are visible during refetching
		const skeletonElements = screen.getAllByTestId('skeleton-button-mock');
		expect(skeletonElements.length).toBe(4);
	});

	it('handles null metrics data gracefully', () => {
		// Mock the utils function to return null to simulate missing data
		(getFormattedEndPointMetricsData as jest.Mock).mockReturnValue(null);

		const mockData = {
			payload: {
				data: {
					result: [
						{
							table: {
								rows: [],
							},
						},
					],
				},
			},
		} as SuccessResponse<any>;

		const mockQuery: MockQueryResult = {
			isLoading: false,
			isRefetching: false,
			isError: false,
			data: mockData,
			refetch: refetchFn,
		};

		render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);

		// Even with null data, the component should render without crashing
		expect(screen.getByText('Rate')).toBeInTheDocument();
	});
});
|
||||
@@ -0,0 +1,221 @@
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import { getFormattedEndPointDropDownData } from 'container/ApiMonitoring/utils';
|
||||
|
||||
import EndPointsDropDown from '../Explorer/Domains/DomainDetails/components/EndPointsDropDown';
|
||||
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
|
||||
|
||||
// Module mocks for the EndPointsDropDown test suite.
// (Reconstructed: stripped interleaved corruption artifacts; code unchanged.)

// Mock the Select component from antd
jest.mock('antd', () => {
	const originalModule = jest.requireActual('antd');
	return {
		...originalModule,
		Select: jest
			.fn()
			.mockImplementation(({ value, loading, onChange, options, onClear }) => (
				<div data-testid="mock-select">
					<div data-testid="select-value">{value}</div>
					<div data-testid="select-loading">
						{loading ? 'loading' : 'not-loading'}
					</div>
					<select
						data-testid="select-element"
						value={value || ''}
						onChange={(e): void => onChange(e.target.value)}
					>
						<option value="">Select...</option>
						{options?.map((option: { value: string; label: string; key: string }) => (
							<option key={option.value} value={option.value}>
								{option.label}
							</option>
						))}
					</select>
					<button data-testid="select-clear-button" type="button" onClick={onClear}>
						Clear
					</button>
				</div>
			)),
	};
});

// Mock the utilities
jest.mock('container/ApiMonitoring/utils', () => ({
	getFormattedEndPointDropDownData: jest.fn(),
}));
|
||||
|
||||
// Test suite for EndPointsDropDown: rendering, loading/fetching states,
// selection, clearing, style passthrough, and data formatting.
// (Reconstructed: stripped interleaved corruption artifacts and restored
// indentation; code content unchanged.)
describe('EndPointsDropDown Component', () => {
	const mockEndPoints = [
		// eslint-disable-next-line sonarjs/no-duplicate-string
		{ key: '1', value: '/api/endpoint1', label: '/api/endpoint1' },
		// eslint-disable-next-line sonarjs/no-duplicate-string
		{ key: '2', value: '/api/endpoint2', label: '/api/endpoint2' },
	];

	const mockSetSelectedEndPointName = jest.fn();

	// Create a mock that satisfies the UseQueryResult interface
	const createMockQueryResult = (overrides: any = {}): any => ({
		data: {
			payload: {
				data: {
					result: [
						{
							table: {
								rows: [],
							},
						},
					],
				},
			},
		},
		dataUpdatedAt: 0,
		error: null,
		errorUpdatedAt: 0,
		failureCount: 0,
		isError: false,
		isFetched: true,
		isFetchedAfterMount: true,
		isFetching: false,
		isIdle: false,
		isLoading: false,
		isLoadingError: false,
		isPlaceholderData: false,
		isPreviousData: false,
		isRefetchError: false,
		isRefetching: false,
		isStale: false,
		isSuccess: true,
		refetch: jest.fn(),
		remove: jest.fn(),
		status: 'success',
		...overrides,
	});

	const defaultProps = {
		selectedEndPointName: '',
		setSelectedEndPointName: mockSetSelectedEndPointName,
		endPointDropDownDataQuery: createMockQueryResult(),
	};

	beforeEach(() => {
		jest.clearAllMocks();
		(getFormattedEndPointDropDownData as jest.Mock).mockReturnValue(
			mockEndPoints,
		);
	});

	it('renders the component correctly', () => {
		// eslint-disable-next-line react/jsx-props-no-spreading
		render(<EndPointsDropDown {...defaultProps} />);

		expect(screen.getByTestId('mock-select')).toBeInTheDocument();
		// eslint-disable-next-line sonarjs/no-duplicate-string
		expect(screen.getByTestId('select-loading')).toHaveTextContent('not-loading');
	});

	it('shows loading state when data is loading', () => {
		const loadingProps = {
			...defaultProps,
			endPointDropDownDataQuery: createMockQueryResult({
				isLoading: true,
			}),
		};

		// eslint-disable-next-line react/jsx-props-no-spreading
		render(<EndPointsDropDown {...loadingProps} />);

		expect(screen.getByTestId('select-loading')).toHaveTextContent('loading');
	});

	it('shows loading state when data is fetching', () => {
		const fetchingProps = {
			...defaultProps,
			endPointDropDownDataQuery: createMockQueryResult({
				isFetching: true,
			}),
		};

		// eslint-disable-next-line react/jsx-props-no-spreading
		render(<EndPointsDropDown {...fetchingProps} />);

		expect(screen.getByTestId('select-loading')).toHaveTextContent('loading');
	});

	it('displays the selected endpoint', () => {
		const selectedProps = {
			...defaultProps,
			selectedEndPointName: '/api/endpoint1',
		};

		// eslint-disable-next-line react/jsx-props-no-spreading
		render(<EndPointsDropDown {...selectedProps} />);

		expect(screen.getByTestId('select-value')).toHaveTextContent(
			'/api/endpoint1',
		);
	});

	it('calls setSelectedEndPointName when an option is selected', () => {
		// eslint-disable-next-line react/jsx-props-no-spreading
		render(<EndPointsDropDown {...defaultProps} />);

		// Get the select element and change its value
		const selectElement = screen.getByTestId('select-element');
		fireEvent.change(selectElement, { target: { value: '/api/endpoint2' } });

		expect(mockSetSelectedEndPointName).toHaveBeenCalledWith('/api/endpoint2');
	});

	it('calls setSelectedEndPointName with empty string when cleared', () => {
		// eslint-disable-next-line react/jsx-props-no-spreading
		render(<EndPointsDropDown {...defaultProps} />);

		// Click the clear button
		const clearButton = screen.getByTestId('select-clear-button');
		fireEvent.click(clearButton);

		expect(mockSetSelectedEndPointName).toHaveBeenCalledWith('');
	});

	it('passes dropdown style prop correctly', () => {
		const styleProps = {
			...defaultProps,
			dropdownStyle: { maxHeight: '200px' },
		};

		// eslint-disable-next-line react/jsx-props-no-spreading
		render(<EndPointsDropDown {...styleProps} />);

		// We can't easily test style props in our mock, but at least ensure the component rendered
		expect(screen.getByTestId('mock-select')).toBeInTheDocument();
	});

	it('formats data using the utility function', () => {
		const mockRows = [
			{ data: { [SPAN_ATTRIBUTES.URL_PATH]: '/api/test', A: 10 } },
		];

		const dataProps = {
			...defaultProps,
			endPointDropDownDataQuery: createMockQueryResult({
				data: {
					payload: {
						data: {
							result: [
								{
									table: {
										rows: mockRows,
									},
								},
							],
						},
					},
				},
			}),
		};

		// eslint-disable-next-line react/jsx-props-no-spreading
		render(<EndPointsDropDown {...dataProps} />);

		expect(getFormattedEndPointDropDownData).toHaveBeenCalledWith(mockRows);
	});
});
|
||||
@@ -0,0 +1,493 @@
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import {
|
||||
getCustomFiltersForBarChart,
|
||||
getFormattedEndPointStatusCodeChartData,
|
||||
getStatusCodeBarChartWidgetData,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import ErrorState from '../Explorer/Domains/DomainDetails/components/ErrorState';
|
||||
import StatusCodeBarCharts from '../Explorer/Domains/DomainDetails/components/StatusCodeBarCharts';
|
||||
|
||||
// Create a partial mock of the UseQueryResult interface for testing.
// Adds the optional `error` field used by the error-state test.
// (Reconstructed: stripped interleaved corruption artifacts; code unchanged.)
interface MockQueryResult {
	isLoading: boolean;
	isRefetching: boolean;
	isError: boolean;
	error?: Error;
	data?: any;
	refetch: () => void;
}
|
||||
|
||||
// Module mocks for the StatusCodeBarCharts test suite: chart renderer,
// navigation/graph hooks, theming/layout hooks, uPlot helpers, ApiMonitoring
// utilities, ErrorState, and antd primitives.
// (Reconstructed: stripped interleaved corruption artifacts; code unchanged.)
jest.mock('components/Uplot', () => ({
	__esModule: true,
	default: jest.fn().mockImplementation(() => <div data-testid="uplot-mock" />),
}));

jest.mock('components/CeleryTask/useGetGraphCustomSeries', () => ({
	useGetGraphCustomSeries: (): { getCustomSeries: jest.Mock } => ({
		getCustomSeries: jest.fn(),
	}),
}));

jest.mock('components/CeleryTask/useNavigateToExplorer', () => ({
	useNavigateToExplorer: (): { navigateToExplorer: jest.Mock } => ({
		navigateToExplorer: jest.fn(),
	}),
}));

jest.mock('container/GridCardLayout/useGraphClickToShowButton', () => ({
	useGraphClickToShowButton: (): {
		componentClick: boolean;
		htmlRef: HTMLElement | null;
	} => ({
		componentClick: false,
		htmlRef: null,
	}),
}));

jest.mock('container/GridCardLayout/useNavigateToExplorerPages', () => ({
	__esModule: true,
	default: (): { navigateToExplorerPages: jest.Mock } => ({
		navigateToExplorerPages: jest.fn(),
	}),
}));

jest.mock('hooks/useDarkMode', () => ({
	useIsDarkMode: (): boolean => false,
}));

jest.mock('hooks/useDimensions', () => ({
	useResizeObserver: (): { width: number; height: number } => ({
		width: 800,
		height: 400,
	}),
}));

jest.mock('hooks/useNotifications', () => ({
	useNotifications: (): { notifications: [] } => ({ notifications: [] }),
}));

jest.mock('lib/uPlotLib/getUplotChartOptions', () => ({
	getUPlotChartOptions: jest.fn().mockReturnValue({}),
}));

jest.mock('lib/uPlotLib/utils/getUplotChartData', () => ({
	getUPlotChartData: jest.fn().mockReturnValue([]),
}));

// Mock utility functions
jest.mock('container/ApiMonitoring/utils', () => ({
	getFormattedEndPointStatusCodeChartData: jest.fn(),
	getStatusCodeBarChartWidgetData: jest.fn(),
	getCustomFiltersForBarChart: jest.fn(),
	statusCodeWidgetInfo: [
		{ title: 'Status Code Count', yAxisUnit: 'count' },
		{ title: 'Status Code Latency', yAxisUnit: 'ms' },
	],
}));

// Mock the ErrorState component
jest.mock('../Explorer/Domains/DomainDetails/components/ErrorState', () => ({
	__esModule: true,
	default: jest.fn().mockImplementation(({ refetch }) => (
		<div data-testid="error-state-mock">
			<button type="button" data-testid="refetch-button" onClick={refetch}>
				Retry
			</button>
		</div>
	)),
}));

// Mock antd components
jest.mock('antd', () => {
	const originalModule = jest.requireActual('antd');
	return {
		...originalModule,
		Card: jest.fn().mockImplementation(({ children, className }) => (
			<div data-testid="card-mock" className={className}>
				{children}
			</div>
		)),
		Typography: {
			Text: jest
				.fn()
				.mockImplementation(({ children }) => (
					<div data-testid="typography-text">{children}</div>
				)),
		},
		Button: {
			...originalModule.Button,
			Group: jest.fn().mockImplementation(({ children, className }) => (
				<div data-testid="button-group" className={className}>
					{children}
				</div>
			)),
		},
		Skeleton: jest
			.fn()
			.mockImplementation(() => (
				<div data-testid="skeleton-mock">Loading skeleton...</div>
			)),
	};
});
|
||||
|
||||
describe('StatusCodeBarCharts', () => {
|
||||
// Default props for tests
|
||||
const mockFilters: IBuilderQuery['filters'] = { items: [], op: 'AND' };
|
||||
const mockTimeRange = {
|
||||
startTime: 1609459200000,
|
||||
endTime: 1609545600000,
|
||||
};
|
||||
const mockDomainName = 'test-domain';
|
||||
const mockEndPointName = '/api/test';
|
||||
const onDragSelectMock = jest.fn();
|
||||
const refetchFn = jest.fn();
|
||||
|
||||
// Mock formatted data
|
||||
const mockFormattedData = {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
values: [[1609459200, 10]],
|
||||
metric: { statusCode: '200-299' },
|
||||
queryName: 'A',
|
||||
},
|
||||
{
|
||||
values: [[1609459200, 5]],
|
||||
metric: { statusCode: '400-499' },
|
||||
queryName: 'B',
|
||||
},
|
||||
],
|
||||
resultType: 'matrix',
|
||||
},
|
||||
};
|
||||
|
||||
// Mock filter values
|
||||
const mockStatusCodeFilters = [
|
||||
{
|
||||
id: 'test-id-1',
|
||||
key: {
|
||||
dataType: 'string',
|
||||
id: 'response_status_code--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'response_status_code',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '>=',
|
||||
value: '200',
|
||||
},
|
||||
{
|
||||
id: 'test-id-2',
|
||||
key: {
|
||||
dataType: 'string',
|
||||
id: 'response_status_code--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'response_status_code',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '<=',
|
||||
value: '299',
|
||||
},
|
||||
];
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
(getFormattedEndPointStatusCodeChartData as jest.Mock).mockReturnValue(
|
||||
mockFormattedData,
|
||||
);
|
||||
(getStatusCodeBarChartWidgetData as jest.Mock).mockReturnValue({
|
||||
id: 'test-widget',
|
||||
title: 'Status Code',
|
||||
description: 'Shows status code distribution',
|
||||
query: { builder: { queryData: [] } },
|
||||
panelTypes: 'bar',
|
||||
});
|
||||
(getCustomFiltersForBarChart as jest.Mock).mockReturnValue(
|
||||
mockStatusCodeFilters,
|
||||
);
|
||||
});
|
||||
|
||||
it('renders loading state correctly', () => {
|
||||
// Arrange
|
||||
const mockStatusCodeQuery: MockQueryResult = {
|
||||
isLoading: true,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
const mockLatencyQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(
|
||||
<StatusCodeBarCharts
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('skeleton-mock')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders error state correctly', () => {
|
||||
// Arrange
|
||||
const mockStatusCodeQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: true,
|
||||
error: new Error('Test error'),
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
const mockLatencyQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(
|
||||
<StatusCodeBarCharts
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('error-state-mock')).toBeInTheDocument();
|
||||
expect(ErrorState).toHaveBeenCalledWith(
|
||||
{ refetch: expect.any(Function) },
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('renders chart data correctly when loaded', () => {
|
||||
// Arrange
|
||||
const mockData = {
|
||||
payload: mockFormattedData,
|
||||
} as SuccessResponse<any>;
|
||||
|
||||
const mockStatusCodeQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
const mockLatencyQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(
|
||||
<StatusCodeBarCharts
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(getFormattedEndPointStatusCodeChartData).toHaveBeenCalledWith(
|
||||
mockData.payload,
|
||||
'sum',
|
||||
);
|
||||
expect(screen.getByTestId('uplot-mock')).toBeInTheDocument();
|
||||
expect(screen.getByText('Number of calls')).toBeInTheDocument();
|
||||
expect(screen.getByText('Latency')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('switches between number of calls and latency views', () => {
|
||||
// Arrange
|
||||
const mockData = {
|
||||
payload: mockFormattedData,
|
||||
} as SuccessResponse<any>;
|
||||
|
||||
const mockStatusCodeQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
const mockLatencyQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(
|
||||
<StatusCodeBarCharts
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Initially should be showing number of calls (index 0)
|
||||
const latencyButton = screen.getByText('Latency');
|
||||
|
||||
// Click to switch to latency view
|
||||
fireEvent.click(latencyButton);
|
||||
|
||||
// Should now format with the latency data
|
||||
expect(getFormattedEndPointStatusCodeChartData).toHaveBeenCalledWith(
|
||||
mockData.payload,
|
||||
'average',
|
||||
);
|
||||
});
|
||||
|
||||
it('uses getCustomFiltersForBarChart when needed', () => {
|
||||
// Arrange
|
||||
const mockData = {
|
||||
payload: mockFormattedData,
|
||||
} as SuccessResponse<any>;
|
||||
|
||||
const mockStatusCodeQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
const mockLatencyQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(
|
||||
<StatusCodeBarCharts
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Assert
|
||||
// Initially getCustomFiltersForBarChart won't be called until a graph click event
|
||||
expect(getCustomFiltersForBarChart).not.toHaveBeenCalled();
|
||||
|
||||
// We can't easily test the graph click handler directly,
|
||||
// but we've confirmed the function is mocked and ready to be tested
|
||||
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
|
||||
mockDomainName,
|
||||
mockEndPointName,
|
||||
expect.objectContaining({
|
||||
items: [],
|
||||
op: 'AND',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('handles widget generation with current filters', () => {
|
||||
// Arrange
|
||||
const mockCustomFilters = {
|
||||
items: [
|
||||
{
|
||||
id: 'custom-filter',
|
||||
key: { key: 'test-key' },
|
||||
op: '=',
|
||||
value: 'test-value',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const mockData = {
|
||||
payload: mockFormattedData,
|
||||
} as SuccessResponse<any>;
|
||||
|
||||
const mockStatusCodeQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
const mockLatencyQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(
|
||||
<StatusCodeBarCharts
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockCustomFilters as IBuilderQuery['filters']}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Assert widget creation was called with the correct parameters
|
||||
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
|
||||
mockDomainName,
|
||||
mockEndPointName,
|
||||
expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({ id: 'custom-filter' }),
|
||||
]),
|
||||
op: 'AND',
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,175 @@
|
||||
import '@testing-library/jest-dom';
|
||||
|
||||
import { render, screen } from '@testing-library/react';
|
||||
|
||||
import StatusCodeTable from '../Explorer/Domains/DomainDetails/components/StatusCodeTable';
|
||||
|
||||
// Mock the ErrorState component
|
||||
jest.mock('../Explorer/Domains/DomainDetails/components/ErrorState', () =>
|
||||
jest.fn().mockImplementation(({ refetch }) => (
|
||||
<div
|
||||
data-testid="error-state-mock"
|
||||
onClick={refetch}
|
||||
onKeyDown={(e: React.KeyboardEvent<HTMLDivElement>): void => {
|
||||
if (e.key === 'Enter' || e.key === ' ') {
|
||||
refetch();
|
||||
}
|
||||
}}
|
||||
role="button"
|
||||
tabIndex={0}
|
||||
>
|
||||
Error state
|
||||
</div>
|
||||
)),
|
||||
);
|
||||
|
||||
// Mock antd components
|
||||
jest.mock('antd', () => {
|
||||
const originalModule = jest.requireActual('antd');
|
||||
return {
|
||||
...originalModule,
|
||||
Table: jest
|
||||
.fn()
|
||||
.mockImplementation(({ loading, dataSource, columns, locale }) => (
|
||||
<div data-testid="table-mock">
|
||||
{loading && <div data-testid="loading-indicator">Loading...</div>}
|
||||
{dataSource &&
|
||||
dataSource.length === 0 &&
|
||||
!loading &&
|
||||
locale?.emptyText && (
|
||||
<div data-testid="empty-table">{locale.emptyText}</div>
|
||||
)}
|
||||
{dataSource && dataSource.length > 0 && (
|
||||
<div data-testid="table-data">
|
||||
Data loaded with {dataSource.length} rows and {columns.length} columns
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)),
|
||||
Typography: {
|
||||
Text: jest.fn().mockImplementation(({ children, className }) => (
|
||||
<div data-testid="typography-text" className={className}>
|
||||
{children}
|
||||
</div>
|
||||
)),
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
// Create a mock query result type
|
||||
interface MockQueryResult {
|
||||
isLoading: boolean;
|
||||
isRefetching: boolean;
|
||||
isError: boolean;
|
||||
error?: Error;
|
||||
data?: any;
|
||||
refetch: () => void;
|
||||
}
|
||||
|
||||
describe('StatusCodeTable', () => {
|
||||
const refetchFn = jest.fn();
|
||||
|
||||
it('renders loading state correctly', () => {
|
||||
// Arrange
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: true,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(<StatusCodeTable endPointStatusCodeDataQuery={mockQuery as any} />);
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('loading-indicator')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders error state correctly', () => {
|
||||
// Arrange
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: true,
|
||||
error: new Error('Test error'),
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(<StatusCodeTable endPointStatusCodeDataQuery={mockQuery as any} />);
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('error-state-mock')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders empty state when no data is available', () => {
|
||||
// Arrange
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: {
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(<StatusCodeTable endPointStatusCodeDataQuery={mockQuery as any} />);
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('empty-table')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders table data correctly when data is available', () => {
|
||||
// Arrange
|
||||
const mockData = [
|
||||
{
|
||||
data: {
|
||||
response_status_code: '200',
|
||||
A: '150', // count
|
||||
B: '10000000', // latency in nanoseconds
|
||||
C: '5', // rate
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: {
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: mockData,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
// Act
|
||||
render(<StatusCodeTable endPointStatusCodeDataQuery={mockQuery as any} />);
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('table-data')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,374 @@
|
||||
import { fireEvent, render, screen, within } from '@testing-library/react';
|
||||
import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import {
|
||||
formatTopErrorsDataForTable,
|
||||
getEndPointDetailsQueryPayload,
|
||||
getTopErrorsColumnsConfig,
|
||||
getTopErrorsCoRelationQueryFilters,
|
||||
getTopErrorsQueryPayload,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import { useQueries } from 'react-query';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
import TopErrors from '../Explorer/Domains/DomainDetails/TopErrors';
|
||||
|
||||
// Mock the EndPointsDropDown component to avoid issues
|
||||
jest.mock(
|
||||
'../Explorer/Domains/DomainDetails/components/EndPointsDropDown',
|
||||
() => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockImplementation(
|
||||
({ setSelectedEndPointName }): JSX.Element => (
|
||||
<div data-testid="endpoints-dropdown-mock">
|
||||
<select
|
||||
data-testid="endpoints-select"
|
||||
onChange={(e): void => setSelectedEndPointName(e.target.value)}
|
||||
role="combobox"
|
||||
>
|
||||
<option value="/api/test">/api/test</option>
|
||||
<option value="/api/new-endpoint">/api/new-endpoint</option>
|
||||
</select>
|
||||
</div>
|
||||
),
|
||||
),
|
||||
}),
|
||||
);
|
||||
|
||||
// Mock dependencies
|
||||
jest.mock('react-query', () => ({
|
||||
useQueries: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('components/CeleryTask/useNavigateToExplorer', () => ({
|
||||
useNavigateToExplorer: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('container/ApiMonitoring/utils', () => ({
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY: ['key1', 'key2', 'key3', 'key4', 'key5'],
|
||||
formatTopErrorsDataForTable: jest.fn(),
|
||||
getEndPointDetailsQueryPayload: jest.fn(),
|
||||
getTopErrorsColumnsConfig: jest.fn(),
|
||||
getTopErrorsCoRelationQueryFilters: jest.fn(),
|
||||
getTopErrorsQueryPayload: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('TopErrors', () => {
|
||||
const mockProps = {
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
domainName: 'test-domain',
|
||||
timeRange: {
|
||||
startTime: 1000000000,
|
||||
endTime: 1000010000,
|
||||
},
|
||||
initialFilters: {
|
||||
items: [],
|
||||
op: 'AND',
|
||||
},
|
||||
};
|
||||
|
||||
// Setup basic mocks
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
|
||||
// Mock getTopErrorsColumnsConfig
|
||||
(getTopErrorsColumnsConfig as jest.Mock).mockReturnValue([
|
||||
{
|
||||
title: 'Endpoint',
|
||||
dataIndex: 'endpointName',
|
||||
key: 'endpointName',
|
||||
},
|
||||
{
|
||||
title: 'Status Code',
|
||||
dataIndex: 'statusCode',
|
||||
key: 'statusCode',
|
||||
},
|
||||
{
|
||||
title: 'Status Message',
|
||||
dataIndex: 'statusMessage',
|
||||
key: 'statusMessage',
|
||||
},
|
||||
{
|
||||
title: 'Count',
|
||||
dataIndex: 'count',
|
||||
key: 'count',
|
||||
},
|
||||
]);
|
||||
|
||||
// Mock useQueries
|
||||
(useQueries as jest.Mock).mockImplementation((queryConfigs) => {
|
||||
// For topErrorsDataQueries
|
||||
if (
|
||||
queryConfigs.length === 1 &&
|
||||
queryConfigs[0].queryKey &&
|
||||
queryConfigs[0].queryKey[0] === REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN
|
||||
) {
|
||||
return [
|
||||
{
|
||||
data: {
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
metric: {
|
||||
'http.url': '/api/test',
|
||||
status_code: '500',
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
status_message: 'Internal Server Error',
|
||||
},
|
||||
values: [[1000000100, '10']],
|
||||
queryName: 'A',
|
||||
legend: 'Test Legend',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
refetch: jest.fn(),
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
// For endPointDropDownDataQueries
|
||||
return [
|
||||
{
|
||||
data: {
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [
|
||||
{
|
||||
'http.url': '/api/test',
|
||||
A: 100,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
},
|
||||
];
|
||||
});
|
||||
|
||||
// Mock formatTopErrorsDataForTable
|
||||
(formatTopErrorsDataForTable as jest.Mock).mockReturnValue([
|
||||
{
|
||||
key: '1',
|
||||
endpointName: '/api/test',
|
||||
statusCode: '500',
|
||||
statusMessage: 'Internal Server Error',
|
||||
count: 10,
|
||||
},
|
||||
]);
|
||||
|
||||
// Mock getTopErrorsQueryPayload
|
||||
(getTopErrorsQueryPayload as jest.Mock).mockReturnValue([
|
||||
{
|
||||
queryName: 'TopErrorsQuery',
|
||||
start: mockProps.timeRange.startTime,
|
||||
end: mockProps.timeRange.endTime,
|
||||
step: 60,
|
||||
},
|
||||
]);
|
||||
|
||||
// Mock getEndPointDetailsQueryPayload
|
||||
(getEndPointDetailsQueryPayload as jest.Mock).mockReturnValue([
|
||||
{},
|
||||
{},
|
||||
{
|
||||
queryName: 'EndpointDropdownQuery',
|
||||
start: mockProps.timeRange.startTime,
|
||||
end: mockProps.timeRange.endTime,
|
||||
step: 60,
|
||||
},
|
||||
]);
|
||||
|
||||
// Mock useNavigateToExplorer
|
||||
(useNavigateToExplorer as jest.Mock).mockReturnValue(jest.fn());
|
||||
|
||||
// Mock getTopErrorsCoRelationQueryFilters
|
||||
(getTopErrorsCoRelationQueryFilters as jest.Mock).mockReturnValue({
|
||||
items: [
|
||||
{ id: 'test1', key: { key: 'domain' }, op: '=', value: 'test-domain' },
|
||||
{ id: 'test2', key: { key: 'endpoint' }, op: '=', value: '/api/test' },
|
||||
{ id: 'test3', key: { key: 'status' }, op: '=', value: '500' },
|
||||
],
|
||||
op: 'AND',
|
||||
});
|
||||
});
|
||||
|
||||
it('renders component correctly', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
const { container } = render(<TopErrors {...mockProps} />);
|
||||
|
||||
// Check if the title and toggle are rendered
|
||||
expect(screen.getByText('Errors with Status Message')).toBeInTheDocument();
|
||||
expect(screen.getByText('Status Message Exists')).toBeInTheDocument();
|
||||
|
||||
// Find the table row and verify content
|
||||
const tableBody = container.querySelector('.ant-table-tbody');
|
||||
expect(tableBody).not.toBeNull();
|
||||
|
||||
if (tableBody) {
|
||||
const row = within(tableBody as HTMLElement).getByRole('row');
|
||||
expect(within(row).getByText('/api/test')).toBeInTheDocument();
|
||||
expect(within(row).getByText('500')).toBeInTheDocument();
|
||||
expect(within(row).getByText('Internal Server Error')).toBeInTheDocument();
|
||||
}
|
||||
});
|
||||
|
||||
it('renders error state when isError is true', () => {
|
||||
// Mock useQueries to return isError: true
|
||||
(useQueries as jest.Mock).mockImplementationOnce(() => [
|
||||
{
|
||||
isError: true,
|
||||
refetch: jest.fn(),
|
||||
},
|
||||
]);
|
||||
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<TopErrors {...mockProps} />);
|
||||
|
||||
// Error state should be shown with the actual text displayed in the UI
|
||||
expect(
|
||||
screen.getByText('Uh-oh :/ We ran into an error.'),
|
||||
).toBeInTheDocument();
|
||||
expect(screen.getByText('Please refresh this panel.')).toBeInTheDocument();
|
||||
expect(screen.getByText('Refresh this panel')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('handles row click correctly', () => {
|
||||
const navigateMock = jest.fn();
|
||||
(useNavigateToExplorer as jest.Mock).mockReturnValue(navigateMock);
|
||||
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
const { container } = render(<TopErrors {...mockProps} />);
|
||||
|
||||
// Find and click on the table cell containing the endpoint
|
||||
const tableBody = container.querySelector('.ant-table-tbody');
|
||||
expect(tableBody).not.toBeNull();
|
||||
|
||||
if (tableBody) {
|
||||
const row = within(tableBody as HTMLElement).getByRole('row');
|
||||
const cellWithEndpoint = within(row).getByText('/api/test');
|
||||
fireEvent.click(cellWithEndpoint);
|
||||
}
|
||||
|
||||
// Check if navigateToExplorer was called with correct params
|
||||
expect(navigateMock).toHaveBeenCalledWith({
|
||||
filters: [
|
||||
{ id: 'test1', key: { key: 'domain' }, op: '=', value: 'test-domain' },
|
||||
{ id: 'test2', key: { key: 'endpoint' }, op: '=', value: '/api/test' },
|
||||
{ id: 'test3', key: { key: 'status' }, op: '=', value: '500' },
|
||||
],
|
||||
dataSource: DataSource.TRACES,
|
||||
startTime: mockProps.timeRange.startTime,
|
||||
endTime: mockProps.timeRange.endTime,
|
||||
shouldResolveQuery: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('updates endpoint filter when dropdown value changes', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<TopErrors {...mockProps} />);
|
||||
|
||||
// Find the dropdown
|
||||
const dropdown = screen.getByRole('combobox');
|
||||
|
||||
// Mock the change
|
||||
fireEvent.change(dropdown, { target: { value: '/api/new-endpoint' } });
|
||||
|
||||
// Check if getTopErrorsQueryPayload was called with updated parameters
|
||||
expect(getTopErrorsQueryPayload).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('handles status message toggle correctly', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<TopErrors {...mockProps} />);
|
||||
|
||||
// Find the toggle switch
|
||||
const toggle = screen.getByRole('switch');
|
||||
expect(toggle).toBeInTheDocument();
|
||||
|
||||
// Toggle should be on by default
|
||||
expect(toggle).toHaveAttribute('aria-checked', 'true');
|
||||
|
||||
// Click the toggle to turn it off
|
||||
fireEvent.click(toggle);
|
||||
|
||||
// Check if getTopErrorsQueryPayload was called with showStatusCodeErrors=false
|
||||
expect(getTopErrorsQueryPayload).toHaveBeenCalledWith(
|
||||
mockProps.domainName,
|
||||
mockProps.timeRange.startTime,
|
||||
mockProps.timeRange.endTime,
|
||||
expect.any(Object),
|
||||
false,
|
||||
);
|
||||
|
||||
// Title should change
|
||||
expect(screen.getByText('All Errors')).toBeInTheDocument();
|
||||
|
||||
// Click the toggle to turn it back on
|
||||
fireEvent.click(toggle);
|
||||
|
||||
// Check if getTopErrorsQueryPayload was called with showStatusCodeErrors=true
|
||||
expect(getTopErrorsQueryPayload).toHaveBeenCalledWith(
|
||||
mockProps.domainName,
|
||||
mockProps.timeRange.startTime,
|
||||
mockProps.timeRange.endTime,
|
||||
expect.any(Object),
|
||||
true,
|
||||
);
|
||||
|
||||
// Title should change back
|
||||
expect(screen.getByText('Errors with Status Message')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('includes toggle state in query key for cache busting', () => {
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
render(<TopErrors {...mockProps} />);
|
||||
|
||||
const toggle = screen.getByRole('switch');
|
||||
|
||||
// Initial query should include showStatusCodeErrors=true
|
||||
expect(useQueries).toHaveBeenCalledWith(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
queryKey: expect.arrayContaining([
|
||||
REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
|
||||
expect.any(Object),
|
||||
expect.any(String),
|
||||
true,
|
||||
]),
|
||||
}),
|
||||
]),
|
||||
);
|
||||
|
||||
// Click toggle
|
||||
fireEvent.click(toggle);
|
||||
|
||||
// Query should be called with showStatusCodeErrors=false in key
|
||||
expect(useQueries).toHaveBeenCalledWith(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
queryKey: expect.arrayContaining([
|
||||
REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
|
||||
expect.any(Object),
|
||||
expect.any(String),
|
||||
false,
|
||||
]),
|
||||
}),
|
||||
]),
|
||||
);
|
||||
});
|
||||
});
|
||||
File diff suppressed because it is too large
Load Diff
@@ -56,6 +56,7 @@ function WidgetGraphComponent({
|
||||
onOpenTraceBtnClick,
|
||||
customSeries,
|
||||
customErrorMessage,
|
||||
customOnRowClick,
|
||||
}: WidgetGraphComponentProps): JSX.Element {
|
||||
const { safeNavigate } = useSafeNavigate();
|
||||
const [deleteModal, setDeleteModal] = useState(false);
|
||||
@@ -380,6 +381,7 @@ function WidgetGraphComponent({
|
||||
openTracesButton={openTracesButton}
|
||||
onOpenTraceBtnClick={onOpenTraceBtnClick}
|
||||
customSeries={customSeries}
|
||||
customOnRowClick={customOnRowClick}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -47,6 +47,8 @@ function GridCardGraph({
|
||||
start,
|
||||
end,
|
||||
analyticsEvent,
|
||||
customTimeRange,
|
||||
customOnRowClick,
|
||||
}: GridCardGraphProps): JSX.Element {
|
||||
const dispatch = useDispatch();
|
||||
const [errorMessage, setErrorMessage] = useState<string>();
|
||||
@@ -130,6 +132,8 @@ function GridCardGraph({
|
||||
variables: getDashboardVariables(variables),
|
||||
fillGaps: widget.fillSpans,
|
||||
formatForWeb: widget.panelTypes === PANEL_TYPES.TABLE,
|
||||
start: customTimeRange?.startTime || start,
|
||||
end: customTimeRange?.endTime || end,
|
||||
};
|
||||
}
|
||||
updatedQuery.builder.queryData[0].pageSize = 10;
|
||||
@@ -149,6 +153,8 @@ function GridCardGraph({
|
||||
initialDataSource === DataSource.TRACES && widget.selectedTracesFields,
|
||||
},
|
||||
fillGaps: widget.fillSpans,
|
||||
start: customTimeRange?.startTime || start,
|
||||
end: customTimeRange?.endTime || end,
|
||||
};
|
||||
});
|
||||
|
||||
@@ -187,8 +193,8 @@ function GridCardGraph({
|
||||
variables: getDashboardVariables(variables),
|
||||
selectedTime: widget.timePreferance || 'GLOBAL_TIME',
|
||||
globalSelectedInterval,
|
||||
start,
|
||||
end,
|
||||
start: customTimeRange?.startTime || start,
|
||||
end: customTimeRange?.endTime || end,
|
||||
},
|
||||
version || DEFAULT_ENTITY_VERSION,
|
||||
{
|
||||
@@ -202,6 +208,9 @@ function GridCardGraph({
|
||||
widget.timePreferance,
|
||||
widget.fillSpans,
|
||||
requestData,
|
||||
...(customTimeRange && customTimeRange.startTime && customTimeRange.endTime
|
||||
? [customTimeRange.startTime, customTimeRange.endTime]
|
||||
: []),
|
||||
],
|
||||
retry(failureCount, error): boolean {
|
||||
if (
|
||||
@@ -279,6 +288,7 @@ function GridCardGraph({
|
||||
onOpenTraceBtnClick={onOpenTraceBtnClick}
|
||||
customSeries={customSeries}
|
||||
customErrorMessage={isInternalServerError ? customErrorMessage : undefined}
|
||||
customOnRowClick={customOnRowClick}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -39,6 +39,7 @@ export interface WidgetGraphComponentProps {
|
||||
onOpenTraceBtnClick?: (record: RowData) => void;
|
||||
customSeries?: (data: QueryData[]) => uPlot.Series[];
|
||||
customErrorMessage?: string;
|
||||
customOnRowClick?: (record: RowData) => void;
|
||||
}
|
||||
|
||||
export interface GridCardGraphProps {
|
||||
@@ -61,6 +62,11 @@ export interface GridCardGraphProps {
|
||||
start?: number;
|
||||
end?: number;
|
||||
analyticsEvent?: string;
|
||||
customTimeRange?: {
|
||||
startTime: number;
|
||||
endTime: number;
|
||||
};
|
||||
customOnRowClick?: (record: RowData) => void;
|
||||
}
|
||||
|
||||
export interface GetGraphVisibilityStateOnLegendClickProps {
|
||||
|
||||
@@ -43,6 +43,7 @@ function GridTableComponent({
|
||||
sticky,
|
||||
openTracesButton,
|
||||
onOpenTraceBtnClick,
|
||||
customOnRowClick,
|
||||
widgetId,
|
||||
...props
|
||||
}: GridTableComponentProps): JSX.Element {
|
||||
@@ -214,6 +215,30 @@ function GridTableComponent({
|
||||
[newColumnData],
|
||||
);
|
||||
|
||||
const newColumnsWithRenderColumnCell = useMemo(
|
||||
() =>
|
||||
newColumnData.map((column) => ({
|
||||
...column,
|
||||
...('dataIndex' in column &&
|
||||
props.renderColumnCell?.[column.dataIndex as string]
|
||||
? { render: props.renderColumnCell[column.dataIndex as string] }
|
||||
: {}),
|
||||
})),
|
||||
[newColumnData, props.renderColumnCell],
|
||||
);
|
||||
|
||||
const newColumnsWithCustomColTitles = useMemo(
|
||||
() =>
|
||||
newColumnsWithRenderColumnCell.map((column) => ({
|
||||
...column,
|
||||
...('dataIndex' in column &&
|
||||
props.customColTitles?.[column.dataIndex as string]
|
||||
? { title: props.customColTitles[column.dataIndex as string] }
|
||||
: {}),
|
||||
})),
|
||||
[newColumnsWithRenderColumnCell, props.customColTitles],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
eventEmitter.emit(Events.TABLE_COLUMNS_DATA, {
|
||||
columns: newColumnData,
|
||||
@@ -227,15 +252,22 @@ function GridTableComponent({
|
||||
query={query}
|
||||
queryTableData={data}
|
||||
loading={false}
|
||||
columns={openTracesButton ? columnDataWithOpenTracesButton : newColumnData}
|
||||
columns={
|
||||
openTracesButton
|
||||
? columnDataWithOpenTracesButton
|
||||
: newColumnsWithCustomColTitles
|
||||
}
|
||||
dataSource={dataSource}
|
||||
sticky={sticky}
|
||||
widgetId={widgetId}
|
||||
onRow={
|
||||
openTracesButton
|
||||
openTracesButton || customOnRowClick
|
||||
? (record): React.HTMLAttributes<HTMLElement> => ({
|
||||
onClick: (): void => {
|
||||
onOpenTraceBtnClick?.(record);
|
||||
if (openTracesButton) {
|
||||
onOpenTraceBtnClick?.(record);
|
||||
}
|
||||
customOnRowClick?.(record);
|
||||
},
|
||||
})
|
||||
: undefined
|
||||
|
||||
@@ -4,6 +4,7 @@ import {
|
||||
ThresholdOperators,
|
||||
ThresholdProps,
|
||||
} from 'container/NewWidget/RightContainer/Threshold/types';
|
||||
import { QueryTableProps } from 'container/QueryTable/QueryTable.intefaces';
|
||||
import { RowData } from 'lib/query/createTableColumnsFromQuery';
|
||||
import { ColumnUnit } from 'types/api/dashboard/getAll';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
@@ -17,7 +18,10 @@ export type GridTableComponentProps = {
|
||||
searchTerm?: string;
|
||||
openTracesButton?: boolean;
|
||||
onOpenTraceBtnClick?: (record: RowData) => void;
|
||||
customOnRowClick?: (record: RowData) => void;
|
||||
widgetId?: string;
|
||||
renderColumnCell?: QueryTableProps['renderColumnCell'];
|
||||
customColTitles?: Record<string, string>;
|
||||
} & Pick<LogsExplorerTableProps, 'data'> &
|
||||
Omit<TableProps<RowData>, 'columns' | 'dataSource'>;
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
import './Home.styles.scss';
|
||||
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Alert, Button, Popover } from 'antd';
|
||||
import { Button, Popover } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { HostListPayload } from 'api/infraMonitoring/getHostLists';
|
||||
import { K8sPodsListPayload } from 'api/infraMonitoring/getK8sPodsList';
|
||||
@@ -644,16 +644,6 @@ export default function Home(): JSX.Element {
|
||||
</div>
|
||||
|
||||
<div className="home-right-content">
|
||||
<div className="home-notifications-container">
|
||||
<div className="notification">
|
||||
<Alert
|
||||
message="We're transitioning alert rule IDs from integers to UUIDs on April 23, 2025. Both old and new alert links will continue to work after this change - existing notifications using integer IDs will remain functional while new alerts will use the UUID format."
|
||||
type="info"
|
||||
showIcon
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{!isWelcomeChecklistSkipped && !loadingUserPreferences && (
|
||||
<AnimatePresence initial={false}>
|
||||
<Card className="checklist-card">
|
||||
|
||||
@@ -451,6 +451,9 @@ function OnboardingAddDataSource(): JSX.Element {
|
||||
case 'integrations':
|
||||
history.push(ROUTES.INTEGRATIONS);
|
||||
break;
|
||||
case 'home':
|
||||
history.push(ROUTES.HOME);
|
||||
break;
|
||||
default:
|
||||
history.push(ROUTES.APPLICATION);
|
||||
}
|
||||
|
||||
@@ -12,6 +12,34 @@
|
||||
"imgUrl": "/Logos/quickstart.svg",
|
||||
"link": "https://signoz.io/docs/cloud/quickstart/"
|
||||
},
|
||||
{
|
||||
"dataSource": "migrate-from-datadog",
|
||||
"label": "From Datadog",
|
||||
"tags": ["migrate to SigNoz"],
|
||||
"module": "home",
|
||||
"relatedSearchKeywords": [
|
||||
"datadog",
|
||||
"opentelemetry"
|
||||
],
|
||||
"imgUrl": "/Logos/datadog.svg",
|
||||
"link": "https://signoz.io/docs/migration/migrate-from-datadog/"
|
||||
},
|
||||
{
|
||||
"dataSource": "migrate-from-lgtm",
|
||||
"label": "From Grafana",
|
||||
"tags": ["migrate to SigNoz"],
|
||||
"module": "home",
|
||||
"relatedSearchKeywords": [
|
||||
"grafana",
|
||||
"loki",
|
||||
"mirmir",
|
||||
"tempo",
|
||||
"alloy",
|
||||
"opentelemetry"
|
||||
],
|
||||
"imgUrl": "/Logos/grafana.svg",
|
||||
"link": "https://signoz.io/docs/migration/migrate-from-grafana/"
|
||||
},
|
||||
{
|
||||
"dataSource": "java",
|
||||
"entityID": "dataSource",
|
||||
@@ -1109,6 +1137,21 @@
|
||||
"id": "opentelemetry-cpp",
|
||||
"link": "https://signoz.io/docs/instrumentation/opentelemetry-cpp/"
|
||||
},
|
||||
{
|
||||
"dataSource": "nginx-tracing",
|
||||
"label": "Nginx - Tracing",
|
||||
"imgUrl": "/Logos/nginx.svg",
|
||||
"tags": ["apm"],
|
||||
"module": "apm",
|
||||
"relatedSearchKeywords": [
|
||||
"tracing",
|
||||
"nginx server",
|
||||
"nginx proxy",
|
||||
"nginx"
|
||||
],
|
||||
"id": "nginx-tracing",
|
||||
"link": "https://signoz.io/docs/instrumentation/opentelemetry-nginx/"
|
||||
},
|
||||
{
|
||||
"dataSource": "kubernetes-pod-logs",
|
||||
"label": "Kubernetes Pod Logs",
|
||||
@@ -2874,5 +2917,43 @@
|
||||
"imgUrl": "/Logos/rds.svg",
|
||||
"link": "/integrations?integration=aws-integration&service=rds",
|
||||
"internalRedirect": true
|
||||
},
|
||||
{
|
||||
"dataSource": "temporal",
|
||||
"label": "Temporal",
|
||||
"imgUrl": "/Logos/temporal.svg",
|
||||
"tags": ["integrations"],
|
||||
"module": "apm",
|
||||
"relatedSearchKeywords": [
|
||||
"temporal metrics",
|
||||
"temporal traces",
|
||||
"temporal logs",
|
||||
"temporal cloud",
|
||||
"temporal"
|
||||
],
|
||||
"question": {
|
||||
"desc": "What are you using ?",
|
||||
"type": "select",
|
||||
"options": [
|
||||
{
|
||||
"key": "temporal-cloud",
|
||||
"label": "Cloud Metrics",
|
||||
"imgUrl": "/Logos/temporal.svg",
|
||||
"link": "https://signoz.io/docs/integrations/temporal-cloud-metrics/"
|
||||
},
|
||||
{
|
||||
"key": "temporal-golang",
|
||||
"label": "Go",
|
||||
"imgUrl": "/Logos/go.svg",
|
||||
"link": "https://signoz.io/docs/integrations/temporal-golang-opentelemetry/"
|
||||
},
|
||||
{
|
||||
"key": "temporal-typescript",
|
||||
"label": "TypeScript",
|
||||
"imgUrl": "/Logos/javascript.svg",
|
||||
"link": "https://signoz.io/docs/integrations/temporal-typescript-opentelemetry/"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -20,6 +20,7 @@ function PanelWrapper({
|
||||
openTracesButton,
|
||||
onOpenTraceBtnClick,
|
||||
customSeries,
|
||||
customOnRowClick,
|
||||
}: PanelWrapperProps): JSX.Element {
|
||||
const Component = PanelTypeVsPanelWrapper[
|
||||
selectedGraph || widget.panelTypes
|
||||
@@ -46,6 +47,7 @@ function PanelWrapper({
|
||||
searchTerm={searchTerm}
|
||||
openTracesButton={openTracesButton}
|
||||
onOpenTraceBtnClick={onOpenTraceBtnClick}
|
||||
customOnRowClick={customOnRowClick}
|
||||
customSeries={customSeries}
|
||||
/>
|
||||
);
|
||||
|
||||
@@ -11,6 +11,7 @@ function TablePanelWrapper({
|
||||
searchTerm,
|
||||
openTracesButton,
|
||||
onOpenTraceBtnClick,
|
||||
customOnRowClick,
|
||||
}: PanelWrapperProps): JSX.Element {
|
||||
const panelData =
|
||||
(queryResponse.data?.payload?.data?.result?.[0] as any)?.table || [];
|
||||
@@ -26,7 +27,10 @@ function TablePanelWrapper({
|
||||
searchTerm={searchTerm}
|
||||
openTracesButton={openTracesButton}
|
||||
onOpenTraceBtnClick={onOpenTraceBtnClick}
|
||||
customOnRowClick={customOnRowClick}
|
||||
widgetId={widget.id}
|
||||
renderColumnCell={widget.renderColumnCell}
|
||||
customColTitles={widget.customColTitles}
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
{...GRID_TABLE_CONFIG}
|
||||
/>
|
||||
|
||||
@@ -28,6 +28,7 @@ export type PanelWrapperProps = {
|
||||
customTooltipElement?: HTMLDivElement;
|
||||
openTracesButton?: boolean;
|
||||
onOpenTraceBtnClick?: (record: RowData) => void;
|
||||
customOnRowClick?: (record: RowData) => void;
|
||||
customSeries?: (data: QueryData[]) => uPlot.Series[];
|
||||
};
|
||||
|
||||
|
||||
@@ -24,6 +24,11 @@ export default function Toolbar({
|
||||
const isLogsExplorerPage = useMemo(() => pathname === ROUTES.LOGS_EXPLORER, [
|
||||
pathname,
|
||||
]);
|
||||
|
||||
const isApiMonitoringPage = useMemo(() => pathname === ROUTES.API_MONITORING, [
|
||||
pathname,
|
||||
]);
|
||||
|
||||
return (
|
||||
<div className="toolbar">
|
||||
<div className="leftActions">{leftActions}</div>
|
||||
@@ -31,7 +36,7 @@ export default function Toolbar({
|
||||
{showOldCTA && <NewExplorerCTA />}
|
||||
<DateTimeSelectionV2
|
||||
showAutoRefresh={showAutoRefresh}
|
||||
showRefreshText={!isLogsExplorerPage}
|
||||
showRefreshText={!isLogsExplorerPage && !isApiMonitoringPage}
|
||||
/>
|
||||
</div>
|
||||
<div className="rightActions">{rightActions}</div>
|
||||
|
||||
@@ -75,6 +75,8 @@ function DateTimeSelection({
|
||||
isModalTimeSelection = false,
|
||||
onTimeChange,
|
||||
modalSelectedInterval,
|
||||
modalInitialStartTime,
|
||||
modalInitialEndTime,
|
||||
}: Props): JSX.Element {
|
||||
const [formSelector] = Form.useForm();
|
||||
const { safeNavigate } = useSafeNavigate();
|
||||
@@ -94,6 +96,36 @@ function DateTimeSelection({
|
||||
const [, handleCopyToClipboard] = useCopyToClipboard();
|
||||
const [isURLCopied, setIsURLCopied] = useState(false);
|
||||
|
||||
// Prioritize props for initial modal time, fallback to URL params
|
||||
let initialModalStartTime = 0;
|
||||
if (modalInitialStartTime !== undefined) {
|
||||
initialModalStartTime = modalInitialStartTime;
|
||||
} else if (searchStartTime) {
|
||||
initialModalStartTime = parseInt(searchStartTime, 10);
|
||||
}
|
||||
|
||||
let initialModalEndTime = 0;
|
||||
if (modalInitialEndTime !== undefined) {
|
||||
initialModalEndTime = modalInitialEndTime;
|
||||
} else if (searchEndTime) {
|
||||
initialModalEndTime = parseInt(searchEndTime, 10);
|
||||
}
|
||||
|
||||
const [modalStartTime, setModalStartTime] = useState<number>(
|
||||
initialModalStartTime,
|
||||
);
|
||||
const [modalEndTime, setModalEndTime] = useState<number>(initialModalEndTime);
|
||||
|
||||
// Effect to update modal time state when props change
|
||||
useEffect(() => {
|
||||
if (modalInitialStartTime !== undefined) {
|
||||
setModalStartTime(modalInitialStartTime);
|
||||
}
|
||||
if (modalInitialEndTime !== undefined) {
|
||||
setModalEndTime(modalInitialEndTime);
|
||||
}
|
||||
}, [modalInitialStartTime, modalInitialEndTime]);
|
||||
|
||||
const {
|
||||
localstorageStartTime,
|
||||
localstorageEndTime,
|
||||
@@ -212,7 +244,6 @@ function DateTimeSelection({
|
||||
|
||||
const startString = startTime.format(format);
|
||||
const endString = endTime.format(format);
|
||||
|
||||
return `${startString} - ${endString}`;
|
||||
}
|
||||
return timeInterval;
|
||||
@@ -383,13 +414,6 @@ function DateTimeSelection({
|
||||
}
|
||||
}, [defaultRelativeTime, onSelectHandler]);
|
||||
|
||||
const [modalStartTime, setModalStartTime] = useState<number>(
|
||||
searchStartTime ? parseInt(searchStartTime, 10) : 0,
|
||||
);
|
||||
const [modalEndTime, setModalEndTime] = useState<number>(
|
||||
searchEndTime ? parseInt(searchEndTime, 10) : 0,
|
||||
);
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
const onCustomDateHandler = (dateTimeRange: DateTimeRangeType): void => {
|
||||
if (dateTimeRange !== null) {
|
||||
@@ -864,6 +888,8 @@ interface DateTimeSelectionV2Props {
|
||||
dateTimeRange?: [number, number],
|
||||
) => void;
|
||||
modalSelectedInterval?: Time;
|
||||
modalInitialStartTime?: number;
|
||||
modalInitialEndTime?: number;
|
||||
}
|
||||
|
||||
DateTimeSelection.defaultProps = {
|
||||
@@ -875,6 +901,8 @@ DateTimeSelection.defaultProps = {
|
||||
isModalTimeSelection: false,
|
||||
onTimeChange: (): void => {},
|
||||
modalSelectedInterval: RelativeTimeMap['5m'] as Time,
|
||||
modalInitialStartTime: undefined,
|
||||
modalInitialEndTime: undefined,
|
||||
};
|
||||
interface DispatchProps {
|
||||
updateTimeInterval: (
|
||||
|
||||
@@ -59,6 +59,7 @@ export interface GetUPlotChartOptions {
|
||||
timezone?: string;
|
||||
customSeries?: (data: QueryData[]) => uPlot.Series[];
|
||||
isLogScale?: boolean;
|
||||
colorMapping?: Record<string, string>;
|
||||
}
|
||||
|
||||
/** the function converts series A , series B , series C to
|
||||
@@ -166,6 +167,7 @@ export const getUPlotChartOptions = ({
|
||||
timezone,
|
||||
customSeries,
|
||||
isLogScale,
|
||||
colorMapping,
|
||||
}: GetUPlotChartOptions): uPlot.Options => {
|
||||
const timeScaleProps = getXAxisScale(minTimeScale, maxTimeScale);
|
||||
|
||||
@@ -229,10 +231,11 @@ export const getUPlotChartOptions = ({
|
||||
tooltipPlugin({
|
||||
apiResponse,
|
||||
yAxisUnit,
|
||||
stackBarChart,
|
||||
isDarkMode,
|
||||
customTooltipElement,
|
||||
stackBarChart,
|
||||
timezone,
|
||||
colorMapping,
|
||||
customTooltipElement,
|
||||
}),
|
||||
onClickPlugin({
|
||||
onClick: onClickHandler,
|
||||
|
||||
@@ -48,6 +48,7 @@ const generateTooltipContent = (
|
||||
isMergedSeries?: boolean,
|
||||
stackBarChart?: boolean,
|
||||
timezone?: string,
|
||||
colorMapping?: Record<string, string>,
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
): HTMLElement => {
|
||||
const container = document.createElement('div');
|
||||
@@ -95,10 +96,12 @@ const generateTooltipContent = (
|
||||
? ''
|
||||
: getLabelName(metric, queryName || '', legend || '');
|
||||
|
||||
let color = generateColor(
|
||||
label,
|
||||
isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor,
|
||||
);
|
||||
let color =
|
||||
colorMapping?.[label] ||
|
||||
generateColor(
|
||||
label,
|
||||
isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor,
|
||||
);
|
||||
|
||||
// in case of billing graph pick colors from the series options
|
||||
if (isBillingUsageGraphs) {
|
||||
@@ -230,6 +233,7 @@ type ToolTipPluginProps = {
|
||||
isDarkMode: boolean;
|
||||
customTooltipElement?: HTMLDivElement;
|
||||
timezone?: string;
|
||||
colorMapping?: Record<string, string>;
|
||||
};
|
||||
|
||||
const tooltipPlugin = ({
|
||||
@@ -242,6 +246,7 @@ const tooltipPlugin = ({
|
||||
isDarkMode,
|
||||
customTooltipElement,
|
||||
timezone,
|
||||
colorMapping,
|
||||
}: // eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
ToolTipPluginProps): any => {
|
||||
let over: HTMLElement;
|
||||
@@ -309,6 +314,7 @@ ToolTipPluginProps): any => {
|
||||
isMergedSeries,
|
||||
stackBarChart,
|
||||
timezone,
|
||||
colorMapping,
|
||||
);
|
||||
if (customTooltipElement) {
|
||||
content.appendChild(customTooltipElement);
|
||||
|
||||
59
frontend/src/pages/ApiMonitoring/ApiMonitoringPage.test.tsx
Normal file
59
frontend/src/pages/ApiMonitoring/ApiMonitoringPage.test.tsx
Normal file
@@ -0,0 +1,59 @@
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
|
||||
import ApiMonitoringPage from './ApiMonitoringPage';
|
||||
|
||||
// Mock the child component to isolate the ApiMonitoringPage logic
|
||||
// We are not testing ExplorerPage here, just that ApiMonitoringPage renders it via RouteTab.
|
||||
jest.mock('container/ApiMonitoring/Explorer/Explorer', () => ({
|
||||
__esModule: true,
|
||||
default: (): JSX.Element => <div>Mocked Explorer Page</div>,
|
||||
}));
|
||||
|
||||
// Mock the RouteTab component
|
||||
jest.mock('components/RouteTab', () => ({
|
||||
__esModule: true,
|
||||
default: ({
|
||||
routes,
|
||||
activeKey,
|
||||
}: {
|
||||
routes: any[];
|
||||
activeKey: string;
|
||||
}): JSX.Element => (
|
||||
<div data-testid="route-tab">
|
||||
<span>Active Key: {activeKey}</span>
|
||||
{/* Render the component defined in the route for the activeKey */}
|
||||
{routes.find((route) => route.key === activeKey)?.Component()}
|
||||
</div>
|
||||
),
|
||||
}));
|
||||
|
||||
// Mock useLocation hook to properly return the path we're testing
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useLocation: (): { pathname: string } => ({
|
||||
pathname: '/api-monitoring/explorer',
|
||||
}),
|
||||
}));
|
||||
|
||||
describe('ApiMonitoringPage', () => {
|
||||
it('should render the RouteTab with the Explorer tab', () => {
|
||||
render(
|
||||
<MemoryRouter initialEntries={['/api-monitoring/explorer']}>
|
||||
<ApiMonitoringPage />
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
// Check if the mock RouteTab is rendered
|
||||
expect(screen.getByTestId('route-tab')).toBeInTheDocument();
|
||||
|
||||
// Instead of checking for the mock component, just verify the RouteTab is there
|
||||
// and has the correct active key
|
||||
expect(screen.getByText(/Active Key:/)).toBeInTheDocument();
|
||||
|
||||
// We can't test for the Explorer page being rendered right now
|
||||
// but we'll verify the structure exists
|
||||
});
|
||||
|
||||
// Add more tests here later, e.g., testing navigation if more tabs were added
|
||||
});
|
||||
@@ -3,7 +3,10 @@ import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { GetWidgetQueryBuilderProps } from 'container/MetricsApplication/types';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import {
|
||||
IBuilderFormula,
|
||||
IBuilderQuery,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
@@ -12,6 +15,7 @@ interface GetWidgetQueryProps {
|
||||
title: string;
|
||||
description: string;
|
||||
queryData: IBuilderQuery[];
|
||||
queryFormulas?: IBuilderFormula[];
|
||||
panelTypes?: PANEL_TYPES;
|
||||
yAxisUnit?: string;
|
||||
columnUnits?: Record<string, string>;
|
||||
@@ -67,7 +71,7 @@ export function getWidgetQuery(
|
||||
promql: [],
|
||||
builder: {
|
||||
queryData: props.queryData,
|
||||
queryFormulas: [],
|
||||
queryFormulas: (props.queryFormulas as IBuilderFormula[]) || [],
|
||||
},
|
||||
clickhouse_sql: [],
|
||||
id: uuid(),
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
|
||||
import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems';
|
||||
import { QueryTableProps } from 'container/QueryTable/QueryTable.intefaces';
|
||||
import { ReactNode } from 'react';
|
||||
import { Layout } from 'react-grid-layout';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
@@ -113,6 +114,8 @@ export interface IBaseWidget {
|
||||
}
|
||||
export interface Widgets extends IBaseWidget {
|
||||
query: Query;
|
||||
renderColumnCell?: QueryTableProps['renderColumnCell'];
|
||||
customColTitles?: Record<string, string>;
|
||||
}
|
||||
|
||||
export interface PromQLWidgets extends IBaseWidget {
|
||||
|
||||
80
pkg/cache/cache.go
vendored
80
pkg/cache/cache.go
vendored
@@ -2,70 +2,26 @@ package cache
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"time"
|
||||
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/types/cachetypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
// cacheable entity
|
||||
type CacheableEntity interface {
|
||||
encoding.BinaryMarshaler
|
||||
encoding.BinaryUnmarshaler
|
||||
}
|
||||
|
||||
func WrapCacheableEntityErrors(rt reflect.Type, caller string) error {
|
||||
if rt == nil {
|
||||
return fmt.Errorf("%s: (nil)", caller)
|
||||
}
|
||||
|
||||
if rt.Kind() != reflect.Pointer {
|
||||
return fmt.Errorf("%s: (non-pointer \"%s\")", caller, rt.String())
|
||||
}
|
||||
|
||||
return fmt.Errorf("%s: (nil \"%s\")", caller, rt.String())
|
||||
|
||||
}
|
||||
|
||||
// cache status
|
||||
type RetrieveStatus int
|
||||
|
||||
const (
|
||||
RetrieveStatusHit = RetrieveStatus(iota)
|
||||
RetrieveStatusPartialHit
|
||||
RetrieveStatusRangeMiss
|
||||
RetrieveStatusKeyMiss
|
||||
RetrieveStatusRevalidated
|
||||
|
||||
RetrieveStatusError
|
||||
)
|
||||
|
||||
func (s RetrieveStatus) String() string {
|
||||
switch s {
|
||||
case RetrieveStatusHit:
|
||||
return "hit"
|
||||
case RetrieveStatusPartialHit:
|
||||
return "partial hit"
|
||||
case RetrieveStatusRangeMiss:
|
||||
return "range miss"
|
||||
case RetrieveStatusKeyMiss:
|
||||
return "key miss"
|
||||
case RetrieveStatusRevalidated:
|
||||
return "revalidated"
|
||||
case RetrieveStatusError:
|
||||
return "error"
|
||||
default:
|
||||
return "unknown"
|
||||
}
|
||||
}
|
||||
|
||||
// cache interface
|
||||
type Cache interface {
|
||||
Connect(ctx context.Context) error
|
||||
Store(ctx context.Context, cacheKey string, data CacheableEntity, ttl time.Duration) error
|
||||
Retrieve(ctx context.Context, cacheKey string, dest CacheableEntity, allowExpired bool) (RetrieveStatus, error)
|
||||
SetTTL(ctx context.Context, cacheKey string, ttl time.Duration)
|
||||
Remove(ctx context.Context, cacheKey string)
|
||||
BulkRemove(ctx context.Context, cacheKeys []string)
|
||||
Close(ctx context.Context) error
|
||||
// Set sets the cacheable entity in cache.
|
||||
Set(ctx context.Context, orgID valuer.UUID, cacheKey string, data cachetypes.Cacheable, ttl time.Duration) error
|
||||
// Get gets the cacheble entity in the dest entity passed
|
||||
Get(ctx context.Context, orgID valuer.UUID, cacheKey string, dest cachetypes.Cacheable, allowExpired bool) error
|
||||
// Delete deletes the cacheable entity from cache
|
||||
Delete(ctx context.Context, orgID valuer.UUID, cacheKey string)
|
||||
// DeleteMany deletes multiple cacheble entities from cache
|
||||
DeleteMany(ctx context.Context, orgID valuer.UUID, cacheKeys []string)
|
||||
}
|
||||
|
||||
type KeyGenerator interface {
|
||||
// GenerateKeys generates the cache keys for the given query range params
|
||||
// The keys are returned as a map where the key is the query name and the value is the cache key
|
||||
GenerateKeys(*v3.QueryRangeParamsV3) map[string]string
|
||||
}
|
||||
|
||||
20
pkg/cache/cachetest/provider.go
vendored
Normal file
20
pkg/cache/cachetest/provider.go
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
package cachetest
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/cache/memorycache"
|
||||
"github.com/SigNoz/signoz/pkg/factory/factorytest"
|
||||
)
|
||||
|
||||
type provider struct{}
|
||||
|
||||
func New(config cache.Config) (cache.Cache, error) {
|
||||
cache, err := memorycache.New(context.TODO(), factorytest.NewSettings(), config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return cache, nil
|
||||
}
|
||||
2
pkg/cache/config.go
vendored
2
pkg/cache/config.go
vendored
@@ -9,7 +9,7 @@ import (
|
||||
|
||||
type Memory struct {
|
||||
TTL time.Duration `mapstructure:"ttl"`
|
||||
CleanupInterval time.Duration `mapstructure:"cleanupInterval"`
|
||||
CleanupInterval time.Duration `mapstructure:"cleanup_interval"`
|
||||
}
|
||||
|
||||
type Redis struct {
|
||||
|
||||
70
pkg/cache/memorycache/provider.go
vendored
70
pkg/cache/memorycache/provider.go
vendored
@@ -2,12 +2,15 @@ package memorycache
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/types/cachetypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
go_cache "github.com/patrickmn/go-cache"
|
||||
)
|
||||
|
||||
@@ -23,79 +26,52 @@ func New(ctx context.Context, settings factory.ProviderSettings, config cache.Co
|
||||
return &provider{cc: go_cache.New(config.Memory.TTL, config.Memory.CleanupInterval)}, nil
|
||||
}
|
||||
|
||||
// Connect does nothing
|
||||
func (c *provider) Connect(_ context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Store stores the data in the cache
|
||||
func (c *provider) Store(_ context.Context, cacheKey string, data cache.CacheableEntity, ttl time.Duration) error {
|
||||
func (c *provider) Set(_ context.Context, orgID valuer.UUID, cacheKey string, data cachetypes.Cacheable, ttl time.Duration) error {
|
||||
// check if the data being passed is a pointer and is not nil
|
||||
rv := reflect.ValueOf(data)
|
||||
if rv.Kind() != reflect.Pointer || rv.IsNil() {
|
||||
return cache.WrapCacheableEntityErrors(reflect.TypeOf(data), "inmemory")
|
||||
err := cachetypes.ValidatePointer(data, "inmemory")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
c.cc.Set(cacheKey, data, ttl)
|
||||
c.cc.Set(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"), data, ttl)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Retrieve retrieves the data from the cache
|
||||
func (c *provider) Retrieve(_ context.Context, cacheKey string, dest cache.CacheableEntity, allowExpired bool) (cache.RetrieveStatus, error) {
|
||||
func (c *provider) Get(_ context.Context, orgID valuer.UUID, cacheKey string, dest cachetypes.Cacheable, allowExpired bool) error {
|
||||
// check if the destination being passed is a pointer and is not nil
|
||||
dstv := reflect.ValueOf(dest)
|
||||
if dstv.Kind() != reflect.Pointer || dstv.IsNil() {
|
||||
return cache.RetrieveStatusError, cache.WrapCacheableEntityErrors(reflect.TypeOf(dest), "inmemory")
|
||||
err := cachetypes.ValidatePointer(dest, "inmemory")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// check if the destination value is settable
|
||||
dstv := reflect.ValueOf(dest)
|
||||
if !dstv.Elem().CanSet() {
|
||||
return cache.RetrieveStatusError, fmt.Errorf("destination value is not settable, %s", dstv.Elem())
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "destination value is not settable, %s", dstv.Elem())
|
||||
}
|
||||
|
||||
data, found := c.cc.Get(cacheKey)
|
||||
data, found := c.cc.Get(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"))
|
||||
if !found {
|
||||
return cache.RetrieveStatusKeyMiss, nil
|
||||
return errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "key miss")
|
||||
}
|
||||
|
||||
// check the type compatbility between the src and dest
|
||||
srcv := reflect.ValueOf(data)
|
||||
if !srcv.Type().AssignableTo(dstv.Type()) {
|
||||
return cache.RetrieveStatusError, fmt.Errorf("src type is not assignable to dst type")
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "src type is not assignable to dst type")
|
||||
}
|
||||
|
||||
// set the value to from src to dest
|
||||
dstv.Elem().Set(srcv.Elem())
|
||||
return cache.RetrieveStatusHit, nil
|
||||
return nil
|
||||
}
|
||||
|
||||
// SetTTL sets the TTL for the cache entry
|
||||
func (c *provider) SetTTL(_ context.Context, cacheKey string, ttl time.Duration) {
|
||||
item, found := c.cc.Get(cacheKey)
|
||||
if !found {
|
||||
return
|
||||
}
|
||||
_ = c.cc.Replace(cacheKey, item, ttl)
|
||||
func (c *provider) Delete(_ context.Context, orgID valuer.UUID, cacheKey string) {
|
||||
c.cc.Delete(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"))
|
||||
}
|
||||
|
||||
// Remove removes the cache entry
|
||||
func (c *provider) Remove(_ context.Context, cacheKey string) {
|
||||
c.cc.Delete(cacheKey)
|
||||
}
|
||||
|
||||
// BulkRemove removes the cache entries
|
||||
func (c *provider) BulkRemove(_ context.Context, cacheKeys []string) {
|
||||
func (c *provider) DeleteMany(_ context.Context, orgID valuer.UUID, cacheKeys []string) {
|
||||
for _, cacheKey := range cacheKeys {
|
||||
c.cc.Delete(cacheKey)
|
||||
c.cc.Delete(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"))
|
||||
}
|
||||
}
|
||||
|
||||
// Close does nothing
|
||||
func (c *provider) Close(_ context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Configuration returns the cache configuration
|
||||
func (c *provider) Configuration() *cache.Memory {
|
||||
return nil
|
||||
}
|
||||
|
||||
97
pkg/cache/memorycache/provider_test.go
vendored
97
pkg/cache/memorycache/provider_test.go
vendored
@@ -8,6 +8,7 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/factory/factorytest"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
@@ -22,7 +23,6 @@ func TestNew(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
assert.NotNil(t, c)
|
||||
assert.NotNil(t, c.(*provider).cc)
|
||||
assert.NoError(t, c.Connect(context.Background()))
|
||||
}
|
||||
|
||||
type CacheableEntity struct {
|
||||
@@ -63,7 +63,7 @@ func TestStoreWithNilPointer(t *testing.T) {
|
||||
c, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
|
||||
require.NoError(t, err)
|
||||
var storeCacheableEntity *CacheableEntity
|
||||
assert.Error(t, c.Store(context.Background(), "key", storeCacheableEntity, 10*time.Second))
|
||||
assert.Error(t, c.Set(context.Background(), valuer.GenerateUUID(), "key", storeCacheableEntity, 10*time.Second))
|
||||
}
|
||||
|
||||
// this should fail because of no pointer error
|
||||
@@ -75,7 +75,7 @@ func TestStoreWithStruct(t *testing.T) {
|
||||
c, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
|
||||
require.NoError(t, err)
|
||||
var storeCacheableEntity CacheableEntity
|
||||
assert.Error(t, c.Store(context.Background(), "key", storeCacheableEntity, 10*time.Second))
|
||||
assert.Error(t, c.Set(context.Background(), valuer.GenerateUUID(), "key", storeCacheableEntity, 10*time.Second))
|
||||
}
|
||||
|
||||
func TestStoreWithNonNilPointer(t *testing.T) {
|
||||
@@ -90,7 +90,7 @@ func TestStoreWithNonNilPointer(t *testing.T) {
|
||||
Value: 1,
|
||||
Expiry: time.Microsecond,
|
||||
}
|
||||
assert.NoError(t, c.Store(context.Background(), "key", storeCacheableEntity, 10*time.Second))
|
||||
assert.NoError(t, c.Set(context.Background(), valuer.GenerateUUID(), "key", storeCacheableEntity, 10*time.Second))
|
||||
}
|
||||
|
||||
// TestRetrieve tests the Retrieve function
|
||||
@@ -106,13 +106,14 @@ func TestRetrieveWithNilPointer(t *testing.T) {
|
||||
Value: 1,
|
||||
Expiry: time.Microsecond,
|
||||
}
|
||||
assert.NoError(t, c.Store(context.Background(), "key", storeCacheableEntity, 10*time.Second))
|
||||
|
||||
orgID := valuer.GenerateUUID()
|
||||
assert.NoError(t, c.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second))
|
||||
|
||||
var retrieveCacheableEntity *CacheableEntity
|
||||
|
||||
retrieveStatus, err := c.Retrieve(context.Background(), "key", retrieveCacheableEntity, false)
|
||||
err = c.Get(context.Background(), orgID, "key", retrieveCacheableEntity, false)
|
||||
assert.Error(t, err)
|
||||
assert.Equal(t, retrieveStatus, cache.RetrieveStatusError)
|
||||
}
|
||||
|
||||
func TestRetrieveWitNonPointer(t *testing.T) {
|
||||
@@ -127,13 +128,13 @@ func TestRetrieveWitNonPointer(t *testing.T) {
|
||||
Value: 1,
|
||||
Expiry: time.Microsecond,
|
||||
}
|
||||
assert.NoError(t, c.Store(context.Background(), "key", storeCacheableEntity, 10*time.Second))
|
||||
orgID := valuer.GenerateUUID()
|
||||
assert.NoError(t, c.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second))
|
||||
|
||||
var retrieveCacheableEntity CacheableEntity
|
||||
|
||||
retrieveStatus, err := c.Retrieve(context.Background(), "key", retrieveCacheableEntity, false)
|
||||
err = c.Get(context.Background(), orgID, "key", retrieveCacheableEntity, false)
|
||||
assert.Error(t, err)
|
||||
assert.Equal(t, retrieveStatus, cache.RetrieveStatusError)
|
||||
}
|
||||
|
||||
func TestRetrieveWithDifferentTypes(t *testing.T) {
|
||||
@@ -143,17 +144,17 @@ func TestRetrieveWithDifferentTypes(t *testing.T) {
|
||||
}
|
||||
c, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
|
||||
require.NoError(t, err)
|
||||
orgID := valuer.GenerateUUID()
|
||||
storeCacheableEntity := &CacheableEntity{
|
||||
Key: "some-random-key",
|
||||
Value: 1,
|
||||
Expiry: time.Microsecond,
|
||||
}
|
||||
assert.NoError(t, c.Store(context.Background(), "key", storeCacheableEntity, 10*time.Second))
|
||||
assert.NoError(t, c.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second))
|
||||
|
||||
retrieveCacheableEntity := new(DCacheableEntity)
|
||||
retrieveStatus, err := c.Retrieve(context.Background(), "key", retrieveCacheableEntity, false)
|
||||
err = c.Get(context.Background(), orgID, "key", retrieveCacheableEntity, false)
|
||||
assert.Error(t, err)
|
||||
assert.Equal(t, retrieveStatus, cache.RetrieveStatusError)
|
||||
}
|
||||
|
||||
func TestRetrieveWithSameTypes(t *testing.T) {
|
||||
@@ -163,46 +164,20 @@ func TestRetrieveWithSameTypes(t *testing.T) {
|
||||
}
|
||||
c, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
|
||||
require.NoError(t, err)
|
||||
orgID := valuer.GenerateUUID()
|
||||
storeCacheableEntity := &CacheableEntity{
|
||||
Key: "some-random-key",
|
||||
Value: 1,
|
||||
Expiry: time.Microsecond,
|
||||
}
|
||||
assert.NoError(t, c.Store(context.Background(), "key", storeCacheableEntity, 10*time.Second))
|
||||
assert.NoError(t, c.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second))
|
||||
|
||||
retrieveCacheableEntity := new(CacheableEntity)
|
||||
retrieveStatus, err := c.Retrieve(context.Background(), "key", retrieveCacheableEntity, false)
|
||||
err = c.Get(context.Background(), orgID, "key", retrieveCacheableEntity, false)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, retrieveStatus, cache.RetrieveStatusHit)
|
||||
assert.Equal(t, storeCacheableEntity, retrieveCacheableEntity)
|
||||
}
|
||||
|
||||
// TestSetTTL tests the SetTTL function
|
||||
func TestSetTTL(t *testing.T) {
|
||||
c, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cache.Memory{TTL: 10 * time.Second, CleanupInterval: 1 * time.Second}})
|
||||
require.NoError(t, err)
|
||||
storeCacheableEntity := &CacheableEntity{
|
||||
Key: "some-random-key",
|
||||
Value: 1,
|
||||
Expiry: time.Microsecond,
|
||||
}
|
||||
retrieveCacheableEntity := new(CacheableEntity)
|
||||
assert.NoError(t, c.Store(context.Background(), "key", storeCacheableEntity, 2*time.Second))
|
||||
time.Sleep(3 * time.Second)
|
||||
retrieveStatus, err := c.Retrieve(context.Background(), "key", retrieveCacheableEntity, false)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, retrieveStatus, cache.RetrieveStatusKeyMiss)
|
||||
assert.Equal(t, new(CacheableEntity), retrieveCacheableEntity)
|
||||
|
||||
assert.NoError(t, c.Store(context.Background(), "key", storeCacheableEntity, 2*time.Second))
|
||||
c.SetTTL(context.Background(), "key", 4*time.Second)
|
||||
time.Sleep(3 * time.Second)
|
||||
retrieveStatus, err = c.Retrieve(context.Background(), "key", retrieveCacheableEntity, false)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, retrieveStatus, cache.RetrieveStatusHit)
|
||||
assert.Equal(t, retrieveCacheableEntity, storeCacheableEntity)
|
||||
}
|
||||
|
||||
// TestRemove tests the Remove function
|
||||
func TestRemove(t *testing.T) {
|
||||
opts := cache.Memory{
|
||||
@@ -217,13 +192,12 @@ func TestRemove(t *testing.T) {
|
||||
Expiry: time.Microsecond,
|
||||
}
|
||||
retrieveCacheableEntity := new(CacheableEntity)
|
||||
assert.NoError(t, c.Store(context.Background(), "key", storeCacheableEntity, 10*time.Second))
|
||||
c.Remove(context.Background(), "key")
|
||||
orgID := valuer.GenerateUUID()
|
||||
assert.NoError(t, c.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second))
|
||||
c.Delete(context.Background(), orgID, "key")
|
||||
|
||||
retrieveStatus, err := c.Retrieve(context.Background(), "key", retrieveCacheableEntity, false)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, retrieveStatus, cache.RetrieveStatusKeyMiss)
|
||||
assert.Equal(t, new(CacheableEntity), retrieveCacheableEntity)
|
||||
err = c.Get(context.Background(), orgID, "key", retrieveCacheableEntity, false)
|
||||
assert.Error(t, err)
|
||||
}
|
||||
|
||||
// TestBulkRemove tests the BulkRemove function
|
||||
@@ -234,25 +208,22 @@ func TestBulkRemove(t *testing.T) {
|
||||
}
|
||||
c, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
|
||||
require.NoError(t, err)
|
||||
orgID := valuer.GenerateUUID()
|
||||
storeCacheableEntity := &CacheableEntity{
|
||||
Key: "some-random-key",
|
||||
Value: 1,
|
||||
Expiry: time.Microsecond,
|
||||
}
|
||||
retrieveCacheableEntity := new(CacheableEntity)
|
||||
assert.NoError(t, c.Store(context.Background(), "key1", storeCacheableEntity, 10*time.Second))
|
||||
assert.NoError(t, c.Store(context.Background(), "key2", storeCacheableEntity, 10*time.Second))
|
||||
c.BulkRemove(context.Background(), []string{"key1", "key2"})
|
||||
assert.NoError(t, c.Set(context.Background(), orgID, "key1", storeCacheableEntity, 10*time.Second))
|
||||
assert.NoError(t, c.Set(context.Background(), orgID, "key2", storeCacheableEntity, 10*time.Second))
|
||||
c.DeleteMany(context.Background(), orgID, []string{"key1", "key2"})
|
||||
|
||||
retrieveStatus, err := c.Retrieve(context.Background(), "key1", retrieveCacheableEntity, false)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, retrieveStatus, cache.RetrieveStatusKeyMiss)
|
||||
assert.Equal(t, new(CacheableEntity), retrieveCacheableEntity)
|
||||
err = c.Get(context.Background(), orgID, "key1", retrieveCacheableEntity, false)
|
||||
assert.Error(t, err)
|
||||
|
||||
retrieveStatus, err = c.Retrieve(context.Background(), "key2", retrieveCacheableEntity, false)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, retrieveStatus, cache.RetrieveStatusKeyMiss)
|
||||
assert.Equal(t, new(CacheableEntity), retrieveCacheableEntity)
|
||||
err = c.Get(context.Background(), orgID, "key2", retrieveCacheableEntity, false)
|
||||
assert.Error(t, err)
|
||||
}
|
||||
|
||||
// TestCache tests the cache
|
||||
@@ -263,16 +234,16 @@ func TestCache(t *testing.T) {
|
||||
}
|
||||
c, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
|
||||
require.NoError(t, err)
|
||||
orgID := valuer.GenerateUUID()
|
||||
storeCacheableEntity := &CacheableEntity{
|
||||
Key: "some-random-key",
|
||||
Value: 1,
|
||||
Expiry: time.Microsecond,
|
||||
}
|
||||
retrieveCacheableEntity := new(CacheableEntity)
|
||||
assert.NoError(t, c.Store(context.Background(), "key", storeCacheableEntity, 10*time.Second))
|
||||
retrieveStatus, err := c.Retrieve(context.Background(), "key", retrieveCacheableEntity, false)
|
||||
assert.NoError(t, c.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second))
|
||||
err = c.Get(context.Background(), orgID, "key", retrieveCacheableEntity, false)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, retrieveStatus, cache.RetrieveStatusHit)
|
||||
assert.Equal(t, storeCacheableEntity, retrieveCacheableEntity)
|
||||
c.Remove(context.Background(), "key")
|
||||
c.Delete(context.Background(), orgID, "key")
|
||||
}
|
||||
|
||||
119
pkg/cache/rediscache/provider.go
vendored
119
pkg/cache/rediscache/provider.go
vendored
@@ -3,18 +3,22 @@ package rediscache
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"fmt"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/types/cachetypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/go-redis/redis/v8"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
type provider struct {
|
||||
client *redis.Client
|
||||
opts cache.Redis
|
||||
}
|
||||
|
||||
func NewFactory() factory.ProviderFactory[cache.Cache, cache.Config] {
|
||||
@@ -22,99 +26,50 @@ func NewFactory() factory.ProviderFactory[cache.Cache, cache.Config] {
|
||||
}
|
||||
|
||||
func New(ctx context.Context, settings factory.ProviderSettings, config cache.Config) (cache.Cache, error) {
|
||||
return &provider{opts: config.Redis}, nil
|
||||
provider := new(provider)
|
||||
provider.client = redis.NewClient(&redis.Options{
|
||||
Addr: strings.Join([]string{config.Redis.Host, fmt.Sprint(config.Redis.Port)}, ":"),
|
||||
Password: config.Redis.Password,
|
||||
DB: config.Redis.DB,
|
||||
})
|
||||
|
||||
if err := provider.client.Ping(ctx).Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return provider, nil
|
||||
}
|
||||
|
||||
// WithClient creates a new cache with the given client
|
||||
func WithClient(client *redis.Client) *provider {
|
||||
return &provider{client: client}
|
||||
}
|
||||
|
||||
// Connect connects to the redis server
|
||||
func (c *provider) Connect(_ context.Context) error {
|
||||
c.client = redis.NewClient(&redis.Options{
|
||||
Addr: fmt.Sprintf("%s:%d", c.opts.Host, c.opts.Port),
|
||||
Password: c.opts.Password,
|
||||
DB: c.opts.DB,
|
||||
})
|
||||
func (c *provider) Set(ctx context.Context, orgID valuer.UUID, cacheKey string, data cachetypes.Cacheable, ttl time.Duration) error {
|
||||
return c.client.Set(ctx, strings.Join([]string{orgID.StringValue(), cacheKey}, "::"), data, ttl).Err()
|
||||
}
|
||||
|
||||
func (c *provider) Get(ctx context.Context, orgID valuer.UUID, cacheKey string, dest cachetypes.Cacheable, allowExpired bool) error {
|
||||
err := c.client.Get(ctx, strings.Join([]string{orgID.StringValue(), cacheKey}, "::")).Scan(dest)
|
||||
if err != nil {
|
||||
if errors.Is(err, redis.Nil) {
|
||||
return errorsV2.Newf(errorsV2.TypeNotFound, errorsV2.CodeNotFound, "key miss")
|
||||
}
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Store stores the data in the cache
|
||||
func (c *provider) Store(ctx context.Context, cacheKey string, data cache.CacheableEntity, ttl time.Duration) error {
|
||||
return c.client.Set(ctx, cacheKey, data, ttl).Err()
|
||||
func (c *provider) Delete(ctx context.Context, orgID valuer.UUID, cacheKey string) {
|
||||
c.DeleteMany(ctx, orgID, []string{cacheKey})
|
||||
}
|
||||
|
||||
// Retrieve retrieves the data from the cache
|
||||
func (c *provider) Retrieve(ctx context.Context, cacheKey string, dest cache.CacheableEntity, allowExpired bool) (cache.RetrieveStatus, error) {
|
||||
err := c.client.Get(ctx, cacheKey).Scan(dest)
|
||||
if err != nil {
|
||||
if errors.Is(err, redis.Nil) {
|
||||
return cache.RetrieveStatusKeyMiss, nil
|
||||
}
|
||||
return cache.RetrieveStatusError, err
|
||||
func (c *provider) DeleteMany(ctx context.Context, orgID valuer.UUID, cacheKeys []string) {
|
||||
updatedCacheKeys := []string{}
|
||||
for _, cacheKey := range cacheKeys {
|
||||
updatedCacheKeys = append(updatedCacheKeys, strings.Join([]string{orgID.StringValue(), cacheKey}, "::"))
|
||||
}
|
||||
return cache.RetrieveStatusHit, nil
|
||||
}
|
||||
|
||||
// SetTTL sets the TTL for the cache entry
|
||||
func (c *provider) SetTTL(ctx context.Context, cacheKey string, ttl time.Duration) {
|
||||
err := c.client.Expire(ctx, cacheKey, ttl).Err()
|
||||
if err != nil {
|
||||
zap.L().Error("error setting TTL for cache key", zap.String("cacheKey", cacheKey), zap.Duration("ttl", ttl), zap.Error(err))
|
||||
}
|
||||
}
|
||||
|
||||
// Remove removes the cache entry
|
||||
func (c *provider) Remove(ctx context.Context, cacheKey string) {
|
||||
c.BulkRemove(ctx, []string{cacheKey})
|
||||
}
|
||||
|
||||
// BulkRemove removes the cache entries
|
||||
func (c *provider) BulkRemove(ctx context.Context, cacheKeys []string) {
|
||||
if err := c.client.Del(ctx, cacheKeys...).Err(); err != nil {
|
||||
if err := c.client.Del(ctx, updatedCacheKeys...).Err(); err != nil {
|
||||
zap.L().Error("error deleting cache keys", zap.Strings("cacheKeys", cacheKeys), zap.Error(err))
|
||||
}
|
||||
}
|
||||
|
||||
// Close closes the connection to the redis server
|
||||
func (c *provider) Close(_ context.Context) error {
|
||||
return c.client.Close()
|
||||
}
|
||||
|
||||
// Ping pings the redis server
|
||||
func (c *provider) Ping(ctx context.Context) error {
|
||||
return c.client.Ping(ctx).Err()
|
||||
}
|
||||
|
||||
// GetClient returns the redis client
|
||||
func (c *provider) GetClient() *redis.Client {
|
||||
return c.client
|
||||
}
|
||||
|
||||
// GetTTL returns the TTL for the cache entry
|
||||
func (c *provider) GetTTL(ctx context.Context, cacheKey string) time.Duration {
|
||||
ttl, err := c.client.TTL(ctx, cacheKey).Result()
|
||||
if err != nil {
|
||||
zap.L().Error("error getting TTL for cache key", zap.String("cacheKey", cacheKey), zap.Error(err))
|
||||
}
|
||||
return ttl
|
||||
}
|
||||
|
||||
// GetKeys returns the keys matching the pattern
|
||||
func (c *provider) GetKeys(ctx context.Context, pattern string) ([]string, error) {
|
||||
return c.client.Keys(ctx, pattern).Result()
|
||||
}
|
||||
|
||||
// GetKeysWithTTL returns the keys matching the pattern with their TTL
|
||||
func (c *provider) GetKeysWithTTL(ctx context.Context, pattern string) (map[string]time.Duration, error) {
|
||||
keys, err := c.GetKeys(ctx, pattern)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result := make(map[string]time.Duration)
|
||||
for _, key := range keys {
|
||||
result[key] = c.GetTTL(ctx, key)
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
72
pkg/cache/rediscache/provider_test.go
vendored
72
pkg/cache/rediscache/provider_test.go
vendored
@@ -3,10 +3,11 @@ package rediscache
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
_cache "github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/go-redis/redismock/v8"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
@@ -25,7 +26,7 @@ func (ce *CacheableEntity) UnmarshalBinary(data []byte) error {
|
||||
return json.Unmarshal(data, ce)
|
||||
}
|
||||
|
||||
func TestStore(t *testing.T) {
|
||||
func TestSet(t *testing.T) {
|
||||
db, mock := redismock.NewClientMock()
|
||||
cache := WithClient(db)
|
||||
storeCacheableEntity := &CacheableEntity{
|
||||
@@ -34,15 +35,16 @@ func TestStore(t *testing.T) {
|
||||
Expiry: time.Microsecond,
|
||||
}
|
||||
|
||||
mock.ExpectSet("key", storeCacheableEntity, 10*time.Second).RedisNil()
|
||||
_ = cache.Store(context.Background(), "key", storeCacheableEntity, 10*time.Second)
|
||||
orgID := valuer.GenerateUUID()
|
||||
mock.ExpectSet(strings.Join([]string{orgID.StringValue(), "key"}, "::"), storeCacheableEntity, 10*time.Second).RedisNil()
|
||||
_ = cache.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second)
|
||||
|
||||
if err := mock.ExpectationsWereMet(); err != nil {
|
||||
t.Errorf("there were unfulfilled expectations: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRetrieve(t *testing.T) {
|
||||
func TestGet(t *testing.T) {
|
||||
db, mock := redismock.NewClientMock()
|
||||
cache := WithClient(db)
|
||||
storeCacheableEntity := &CacheableEntity{
|
||||
@@ -52,50 +54,26 @@ func TestRetrieve(t *testing.T) {
|
||||
}
|
||||
retrieveCacheableEntity := new(CacheableEntity)
|
||||
|
||||
mock.ExpectSet("key", storeCacheableEntity, 10*time.Second).RedisNil()
|
||||
_ = cache.Store(context.Background(), "key", storeCacheableEntity, 10*time.Second)
|
||||
orgID := valuer.GenerateUUID()
|
||||
mock.ExpectSet(strings.Join([]string{orgID.StringValue(), "key"}, "::"), storeCacheableEntity, 10*time.Second).RedisNil()
|
||||
_ = cache.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second)
|
||||
|
||||
data, err := storeCacheableEntity.MarshalBinary()
|
||||
assert.NoError(t, err)
|
||||
|
||||
mock.ExpectGet("key").SetVal(string(data))
|
||||
retrieveStatus, err := cache.Retrieve(context.Background(), "key", retrieveCacheableEntity, false)
|
||||
mock.ExpectGet(strings.Join([]string{orgID.StringValue(), "key"}, "::")).SetVal(string(data))
|
||||
err = cache.Get(context.Background(), orgID, "key", retrieveCacheableEntity, false)
|
||||
if err != nil {
|
||||
t.Errorf("unexpected error: %s", err)
|
||||
}
|
||||
|
||||
if retrieveStatus != _cache.RetrieveStatusHit {
|
||||
t.Errorf("expected status %d, got %d", _cache.RetrieveStatusHit, retrieveStatus)
|
||||
}
|
||||
|
||||
assert.Equal(t, storeCacheableEntity, retrieveCacheableEntity)
|
||||
|
||||
if err := mock.ExpectationsWereMet(); err != nil {
|
||||
t.Errorf("there were unfulfilled expectations: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSetTTL(t *testing.T) {
|
||||
db, mock := redismock.NewClientMock()
|
||||
cache := WithClient(db)
|
||||
storeCacheableEntity := &CacheableEntity{
|
||||
Key: "some-random-key",
|
||||
Value: 1,
|
||||
Expiry: time.Microsecond,
|
||||
}
|
||||
|
||||
mock.ExpectSet("key", storeCacheableEntity, 10*time.Second).RedisNil()
|
||||
_ = cache.Store(context.Background(), "key", storeCacheableEntity, 10*time.Second)
|
||||
|
||||
mock.ExpectExpire("key", 4*time.Second).RedisNil()
|
||||
cache.SetTTL(context.Background(), "key", 4*time.Second)
|
||||
|
||||
if err := mock.ExpectationsWereMet(); err != nil {
|
||||
t.Errorf("there were unfulfilled expectations: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRemove(t *testing.T) {
|
||||
func TestDelete(t *testing.T) {
|
||||
db, mock := redismock.NewClientMock()
|
||||
c := WithClient(db)
|
||||
storeCacheableEntity := &CacheableEntity{
|
||||
@@ -103,19 +81,20 @@ func TestRemove(t *testing.T) {
|
||||
Value: 1,
|
||||
Expiry: time.Microsecond,
|
||||
}
|
||||
orgID := valuer.GenerateUUID()
|
||||
|
||||
mock.ExpectSet("key", storeCacheableEntity, 10*time.Second).RedisNil()
|
||||
_ = c.Store(context.Background(), "key", storeCacheableEntity, 10*time.Second)
|
||||
mock.ExpectSet(strings.Join([]string{orgID.StringValue(), "key"}, "::"), storeCacheableEntity, 10*time.Second).RedisNil()
|
||||
_ = c.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second)
|
||||
|
||||
mock.ExpectDel("key").RedisNil()
|
||||
c.Remove(context.Background(), "key")
|
||||
mock.ExpectDel(strings.Join([]string{orgID.StringValue(), "key"}, "::")).RedisNil()
|
||||
c.Delete(context.Background(), orgID, "key")
|
||||
|
||||
if err := mock.ExpectationsWereMet(); err != nil {
|
||||
t.Errorf("there were unfulfilled expectations: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestBulkRemove(t *testing.T) {
|
||||
func TestDeleteMany(t *testing.T) {
|
||||
db, mock := redismock.NewClientMock()
|
||||
c := WithClient(db)
|
||||
storeCacheableEntity := &CacheableEntity{
|
||||
@@ -123,15 +102,16 @@ func TestBulkRemove(t *testing.T) {
|
||||
Value: 1,
|
||||
Expiry: time.Microsecond,
|
||||
}
|
||||
orgID := valuer.GenerateUUID()
|
||||
|
||||
mock.ExpectSet("key", storeCacheableEntity, 10*time.Second).RedisNil()
|
||||
_ = c.Store(context.Background(), "key", storeCacheableEntity, 10*time.Second)
|
||||
mock.ExpectSet(strings.Join([]string{orgID.StringValue(), "key"}, "::"), storeCacheableEntity, 10*time.Second).RedisNil()
|
||||
_ = c.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second)
|
||||
|
||||
mock.ExpectSet("key2", storeCacheableEntity, 10*time.Second).RedisNil()
|
||||
_ = c.Store(context.Background(), "key2", storeCacheableEntity, 10*time.Second)
|
||||
mock.ExpectSet(strings.Join([]string{orgID.StringValue(), "key2"}, "::"), storeCacheableEntity, 10*time.Second).RedisNil()
|
||||
_ = c.Set(context.Background(), orgID, "key2", storeCacheableEntity, 10*time.Second)
|
||||
|
||||
mock.ExpectDel("key", "key2").RedisNil()
|
||||
c.BulkRemove(context.Background(), []string{"key", "key2"})
|
||||
mock.ExpectDel(strings.Join([]string{orgID.StringValue(), "key"}, "::"), strings.Join([]string{orgID.StringValue(), "key2"}, "::")).RedisNil()
|
||||
c.DeleteMany(context.Background(), orgID, []string{"key", "key2"})
|
||||
|
||||
if err := mock.ExpectationsWereMet(); err != nil {
|
||||
t.Errorf("there were unfulfilled expectations: %s", err)
|
||||
|
||||
87
pkg/modules/quickfilter/api.go
Normal file
87
pkg/modules/quickfilter/api.go
Normal file
@@ -0,0 +1,87 @@
|
||||
package quickfilter
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/quickfiltertypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/gorilla/mux"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
type API interface {
|
||||
GetQuickFilters(http.ResponseWriter, *http.Request)
|
||||
UpdateQuickFilters(http.ResponseWriter, *http.Request)
|
||||
GetSignalFilters(http.ResponseWriter, *http.Request)
|
||||
}
|
||||
|
||||
type quickFiltersAPI struct {
|
||||
usecase Usecase
|
||||
}
|
||||
|
||||
func NewAPI(usecase Usecase) API {
|
||||
return &quickFiltersAPI{usecase: usecase}
|
||||
}
|
||||
|
||||
func (q *quickFiltersAPI) GetQuickFilters(rw http.ResponseWriter, r *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
filters, err := q.usecase.GetQuickFilters(r.Context(), valuer.MustNewUUID(claims.OrgID))
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, filters)
|
||||
}
|
||||
|
||||
func (q *quickFiltersAPI) UpdateQuickFilters(rw http.ResponseWriter, r *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
var req quickfiltertypes.UpdatableQuickFilters
|
||||
decodeErr := json.NewDecoder(r.Body).Decode(&req)
|
||||
if decodeErr != nil {
|
||||
render.Error(rw, decodeErr)
|
||||
return
|
||||
}
|
||||
|
||||
err = q.usecase.UpdateQuickFilters(r.Context(), valuer.MustNewUUID(claims.OrgID), req.Signal, req.Filters)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func (q *quickFiltersAPI) GetSignalFilters(rw http.ResponseWriter, r *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
signal := mux.Vars(r)["signal"]
|
||||
validatedSignal, err := quickfiltertypes.NewSignal(signal)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
filters, err := q.usecase.GetSignalFilters(r.Context(), valuer.MustNewUUID(claims.OrgID), validatedSignal)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, filters)
|
||||
}
|
||||
116
pkg/modules/quickfilter/core/core.go
Normal file
116
pkg/modules/quickfilter/core/core.go
Normal file
@@ -0,0 +1,116 @@
|
||||
package core
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/modules/quickfilter"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/types/quickfiltertypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type usecase struct {
|
||||
store quickfiltertypes.QuickFilterStore
|
||||
}
|
||||
|
||||
// NewQuickFilters creates a new quick filters usecase
|
||||
func NewQuickFilters(store quickfiltertypes.QuickFilterStore) quickfilter.Usecase {
|
||||
return &usecase{store: store}
|
||||
}
|
||||
|
||||
// GetQuickFilters returns all quick filters for an organization
|
||||
func (u *usecase) GetQuickFilters(ctx context.Context, orgID valuer.UUID) ([]*quickfiltertypes.SignalFilters, error) {
|
||||
storedFilters, err := u.store.Get(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error fetching organization filters")
|
||||
}
|
||||
|
||||
result := make([]*quickfiltertypes.SignalFilters, 0, len(storedFilters))
|
||||
for _, storedFilter := range storedFilters {
|
||||
signalFilter, err := quickfiltertypes.NewSignalFilterFromStorableQuickFilter(storedFilter)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error processing filter for signal: %s", storedFilter.Signal)
|
||||
}
|
||||
result = append(result, signalFilter)
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// GetSignalFilters returns quick filters for a specific signal in an organization
|
||||
func (u *usecase) GetSignalFilters(ctx context.Context, orgID valuer.UUID, signal quickfiltertypes.Signal) (*quickfiltertypes.SignalFilters, error) {
|
||||
storedFilter, err := u.store.GetBySignal(ctx, orgID, signal.StringValue())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// If no filter exists for this signal, return empty filters with the requested signal
|
||||
if storedFilter == nil {
|
||||
return &quickfiltertypes.SignalFilters{
|
||||
Signal: signal,
|
||||
Filters: []v3.AttributeKey{},
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Convert stored filter to signal filter
|
||||
signalFilter, err := quickfiltertypes.NewSignalFilterFromStorableQuickFilter(storedFilter)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error processing filter for signal: %s", storedFilter.Signal)
|
||||
}
|
||||
|
||||
return signalFilter, nil
|
||||
}
|
||||
|
||||
// UpdateQuickFilters updates quick filters for a specific signal in an organization
|
||||
func (u *usecase) UpdateQuickFilters(ctx context.Context, orgID valuer.UUID, signal quickfiltertypes.Signal, filters []v3.AttributeKey) error {
|
||||
// Validate each filter
|
||||
for _, filter := range filters {
|
||||
if err := filter.Validate(); err != nil {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid filter: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Marshal filters to JSON
|
||||
filterJSON, err := json.Marshal(filters)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error marshalling filters")
|
||||
}
|
||||
|
||||
// Check if filter exists
|
||||
existingFilter, err := u.store.GetBySignal(ctx, orgID, signal.StringValue())
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error checking existing filters")
|
||||
}
|
||||
|
||||
var filter *quickfiltertypes.StorableQuickFilter
|
||||
if existingFilter != nil {
|
||||
// Update in place
|
||||
if err := existingFilter.Update(filterJSON); err != nil {
|
||||
return errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "error updating existing filter")
|
||||
}
|
||||
filter = existingFilter
|
||||
} else {
|
||||
// Create new
|
||||
filter, err = quickfiltertypes.NewStorableQuickFilter(orgID, signal, filterJSON)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "error creating new filter")
|
||||
}
|
||||
}
|
||||
|
||||
// Persist filter
|
||||
if err := u.store.Upsert(ctx, filter); err != nil {
|
||||
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, fmt.Sprintf("error upserting filter for signal: %s", signal.StringValue()))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (u *usecase) SetDefaultConfig(ctx context.Context, orgID valuer.UUID) error {
|
||||
storableQuickFilters, err := quickfiltertypes.NewDefaultQuickFilter(orgID)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error creating default quick filters")
|
||||
}
|
||||
return u.store.Create(ctx, storableQuickFilters)
|
||||
}
|
||||
92
pkg/modules/quickfilter/core/store.go
Normal file
92
pkg/modules/quickfilter/core/store.go
Normal file
@@ -0,0 +1,92 @@
|
||||
package core
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types/quickfiltertypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type store struct {
|
||||
store sqlstore.SQLStore
|
||||
}
|
||||
|
||||
// NewStore creates a new SQLite store for quick filters
|
||||
func NewStore(db sqlstore.SQLStore) quickfiltertypes.QuickFilterStore {
|
||||
return &store{store: db}
|
||||
}
|
||||
|
||||
// GetQuickFilters retrieves all filters for an organization
|
||||
func (s *store) Get(ctx context.Context, orgID valuer.UUID) ([]*quickfiltertypes.StorableQuickFilter, error) {
|
||||
filters := make([]*quickfiltertypes.StorableQuickFilter, 0)
|
||||
|
||||
err := s.store.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(&filters).
|
||||
Where("org_id = ?", orgID).
|
||||
Order("signal ASC").
|
||||
Scan(ctx)
|
||||
|
||||
if err != nil {
|
||||
return filters, err
|
||||
}
|
||||
|
||||
return filters, nil
|
||||
}
|
||||
|
||||
// GetSignalFilters retrieves filters for a specific signal in an organization
|
||||
func (s *store) GetBySignal(ctx context.Context, orgID valuer.UUID, signal string) (*quickfiltertypes.StorableQuickFilter, error) {
|
||||
filter := new(quickfiltertypes.StorableQuickFilter)
|
||||
|
||||
err := s.store.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(filter).
|
||||
Where("org_id = ?", orgID).
|
||||
Where("signal = ?", signal).
|
||||
Scan(ctx)
|
||||
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, s.store.WrapNotFoundErrf(err, errors.CodeNotFound, "No rows found for org_id: "+orgID.StringValue()+" signal: "+signal)
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return filter, nil
|
||||
}
|
||||
|
||||
// UpsertQuickFilter inserts or updates filters for an organization and signal
|
||||
func (s *store) Upsert(ctx context.Context, filter *quickfiltertypes.StorableQuickFilter) error {
|
||||
_, err := s.store.
|
||||
BunDB().
|
||||
NewInsert().
|
||||
Model(filter).
|
||||
On("CONFLICT (id) DO UPDATE").
|
||||
Set("filter = EXCLUDED.filter").
|
||||
Set("updated_at = EXCLUDED.updated_at").
|
||||
Exec(ctx)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *store) Create(ctx context.Context, filters []*quickfiltertypes.StorableQuickFilter) error {
|
||||
// Using SQLite-specific conflict resolution
|
||||
_, err := s.store.
|
||||
BunDB().
|
||||
NewInsert().
|
||||
Model(&filters).
|
||||
On("CONFLICT (org_id, signal) DO NOTHING").
|
||||
Exec(ctx)
|
||||
|
||||
if err != nil {
|
||||
return s.store.WrapAlreadyExistsErrf(err, errors.CodeAlreadyExists, "Quick Filter can not be created")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
15
pkg/modules/quickfilter/usecase.go
Normal file
15
pkg/modules/quickfilter/usecase.go
Normal file
@@ -0,0 +1,15 @@
|
||||
package quickfilter
|
||||
|
||||
import (
|
||||
"context"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/types/quickfiltertypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type Usecase interface {
|
||||
GetQuickFilters(ctx context.Context, orgID valuer.UUID) ([]*quickfiltertypes.SignalFilters, error)
|
||||
UpdateQuickFilters(ctx context.Context, orgID valuer.UUID, signal quickfiltertypes.Signal, filters []v3.AttributeKey) error
|
||||
GetSignalFilters(ctx context.Context, orgID valuer.UUID, signal quickfiltertypes.Signal) (*quickfiltertypes.SignalFilters, error)
|
||||
SetDefaultConfig(ctx context.Context, orgID valuer.UUID) error
|
||||
}
|
||||
@@ -1,533 +0,0 @@
|
||||
package impltracefunnel
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
tf "github.com/SigNoz/signoz/pkg/types/tracefunnel"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
type handler struct {
|
||||
module tracefunnel.Module
|
||||
}
|
||||
|
||||
func NewHandler(module tracefunnel.Module) tracefunnel.Handler {
|
||||
return &handler{module: module}
|
||||
}
|
||||
|
||||
func (handler *handler) New(rw http.ResponseWriter, r *http.Request) {
|
||||
var req tf.FunnelRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
userID := claims.UserID
|
||||
orgID := claims.OrgID
|
||||
|
||||
funnels, err := handler.module.List(r.Context(), orgID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
for _, f := range funnels {
|
||||
if f.Name == req.Name {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "a funnel with name '%s' already exists in this organization", req.Name))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
funnel, err := handler.module.Create(r.Context(), req.Timestamp, req.Name, userID, orgID)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to create funnel"))
|
||||
return
|
||||
}
|
||||
|
||||
response := tf.FunnelResponse{
|
||||
FunnelID: funnel.ID.String(),
|
||||
FunnelName: funnel.Name,
|
||||
CreatedAt: req.Timestamp,
|
||||
UserEmail: claims.Email,
|
||||
OrgID: orgID,
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, response)
|
||||
}
|
||||
|
||||
func (handler *handler) UpdateSteps(rw http.ResponseWriter, r *http.Request) {
|
||||
var req tf.FunnelRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
userID := claims.UserID
|
||||
orgID := claims.OrgID
|
||||
|
||||
if err := tracefunnel.ValidateTimestamp(req.Timestamp, "timestamp"); err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "timestamp is invalid: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
funnel, err := handler.module.Get(r.Context(), req.FunnelID.String())
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "funnel not found: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
// Check if name is being updated and if it already exists
|
||||
if req.Name != "" && req.Name != funnel.Name {
|
||||
funnels, err := handler.module.List(r.Context(), orgID)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to list funnels: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
for _, f := range funnels {
|
||||
if f.Name == req.Name {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "a funnel with name '%s' already exists in this organization", req.Name))
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Process each step in the request
|
||||
for i := range req.Steps {
|
||||
if req.Steps[i].Order < 1 {
|
||||
req.Steps[i].Order = int64(i + 1) // Default to sequential ordering if not specified
|
||||
}
|
||||
// Generate a new UUID for the step if it doesn't have one
|
||||
if req.Steps[i].Id.IsZero() {
|
||||
newUUID := valuer.GenerateUUID()
|
||||
req.Steps[i].Id = newUUID
|
||||
}
|
||||
}
|
||||
|
||||
if err := tracefunnel.ValidateFunnelSteps(req.Steps); err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid funnel steps: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
// Normalize step orders
|
||||
req.Steps = tracefunnel.NormalizeFunnelSteps(req.Steps)
|
||||
|
||||
// UpdateSteps the funnel with new steps
|
||||
funnel.Steps = req.Steps
|
||||
funnel.UpdatedAt = time.Unix(0, req.Timestamp*1000000) // Convert to nanoseconds
|
||||
funnel.UpdatedBy = userID
|
||||
|
||||
if req.Name != "" {
|
||||
funnel.Name = req.Name
|
||||
}
|
||||
if req.Description != "" {
|
||||
funnel.Description = req.Description
|
||||
}
|
||||
|
||||
// UpdateSteps funnel in database
|
||||
err = handler.module.Update(r.Context(), funnel, userID)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to update funnel in database: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
//// UpdateSteps name and description if provided
|
||||
//if req.Name != "" || req.Description != "" {
|
||||
// name := req.Name
|
||||
//
|
||||
// description := req.Description
|
||||
//
|
||||
// err = handler.module.UpdateMetadata(r.Context(), funnel.ID, name, description, userID)
|
||||
// if err != nil {
|
||||
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to update funnel metadata: %v", err))
|
||||
// return
|
||||
// }
|
||||
//}
|
||||
|
||||
// Get the updated funnel to return in response
|
||||
updatedFunnel, err := handler.module.Get(r.Context(), funnel.ID.String())
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to get updated funnel: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
response := tf.FunnelResponse{
|
||||
FunnelName: updatedFunnel.Name,
|
||||
FunnelID: updatedFunnel.ID.String(),
|
||||
Steps: updatedFunnel.Steps,
|
||||
CreatedAt: updatedFunnel.CreatedAt.UnixNano() / 1000000,
|
||||
CreatedBy: updatedFunnel.CreatedBy,
|
||||
OrgID: updatedFunnel.OrgID.String(),
|
||||
UpdatedBy: userID,
|
||||
UpdatedAt: updatedFunnel.UpdatedAt.UnixNano() / 1000000,
|
||||
Description: updatedFunnel.Description,
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, response)
|
||||
}
|
||||
|
||||
func (handler *handler) UpdateFunnel(rw http.ResponseWriter, r *http.Request) {
|
||||
var req tf.FunnelRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
userID := claims.UserID
|
||||
orgID := claims.OrgID
|
||||
|
||||
if err := tracefunnel.ValidateTimestamp(req.Timestamp, "timestamp"); err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "timestamp is invalid: %v", err))
|
||||
return
|
||||
}
|
||||
vars := mux.Vars(r)
|
||||
funnelID := vars["funnel_id"]
|
||||
|
||||
funnel, err := handler.module.Get(r.Context(), funnelID)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "funnel not found: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
// Check if name is being updated and if it already exists
|
||||
if req.Name != "" && req.Name != funnel.Name {
|
||||
funnels, err := handler.module.List(r.Context(), orgID)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to list funnels: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
for _, f := range funnels {
|
||||
if f.Name == req.Name {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "a funnel with name '%s' already exists in this organization", req.Name))
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
funnel.UpdatedAt = time.Unix(0, req.Timestamp*1000000) // Convert to nanoseconds
|
||||
funnel.UpdatedBy = userID
|
||||
|
||||
if req.Name != "" {
|
||||
funnel.Name = req.Name
|
||||
}
|
||||
if req.Description != "" {
|
||||
funnel.Description = req.Description
|
||||
}
|
||||
|
||||
// Update funnel in database
|
||||
err = handler.module.Update(r.Context(), funnel, userID)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to update funnel in database: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
// Get the updated funnel to return in response
|
||||
updatedFunnel, err := handler.module.Get(r.Context(), funnel.ID.String())
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to get updated funnel: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
response := tf.FunnelResponse{
|
||||
FunnelName: updatedFunnel.Name,
|
||||
FunnelID: updatedFunnel.ID.String(),
|
||||
Steps: updatedFunnel.Steps,
|
||||
CreatedAt: updatedFunnel.CreatedAt.UnixNano() / 1000000,
|
||||
CreatedBy: updatedFunnel.CreatedBy,
|
||||
OrgID: updatedFunnel.OrgID.String(),
|
||||
UpdatedBy: userID,
|
||||
UpdatedAt: updatedFunnel.UpdatedAt.UnixNano() / 1000000,
|
||||
Description: updatedFunnel.Description,
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, response)
|
||||
}
|
||||
|
||||
func (handler *handler) List(rw http.ResponseWriter, r *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "unauthenticated"))
|
||||
return
|
||||
}
|
||||
|
||||
orgID := claims.OrgID
|
||||
funnels, err := handler.module.List(r.Context(), orgID)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to list funnels: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
var response []tf.FunnelResponse
|
||||
for _, f := range funnels {
|
||||
funnelResp := tf.FunnelResponse{
|
||||
FunnelName: f.Name,
|
||||
FunnelID: f.ID.String(),
|
||||
CreatedAt: f.CreatedAt.UnixNano() / 1000000,
|
||||
CreatedBy: f.CreatedBy,
|
||||
OrgID: f.OrgID.String(),
|
||||
UpdatedAt: f.UpdatedAt.UnixNano() / 1000000,
|
||||
UpdatedBy: f.UpdatedBy,
|
||||
Description: f.Description,
|
||||
}
|
||||
|
||||
// Get user email if available
|
||||
if f.CreatedByUser != nil {
|
||||
funnelResp.UserEmail = f.CreatedByUser.Email
|
||||
}
|
||||
|
||||
response = append(response, funnelResp)
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, response)
|
||||
}
|
||||
|
||||
func (handler *handler) Get(rw http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
funnelID := vars["funnel_id"]
|
||||
|
||||
funnel, err := handler.module.Get(r.Context(), funnelID)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "funnel not found: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
// Create a response with all funnel details including step IDs
|
||||
response := tf.FunnelResponse{
|
||||
FunnelID: funnel.ID.String(),
|
||||
FunnelName: funnel.Name,
|
||||
Description: funnel.Description,
|
||||
CreatedAt: funnel.CreatedAt.UnixNano() / 1000000,
|
||||
UpdatedAt: funnel.UpdatedAt.UnixNano() / 1000000,
|
||||
CreatedBy: funnel.CreatedBy,
|
||||
UpdatedBy: funnel.UpdatedBy,
|
||||
OrgID: funnel.OrgID.String(),
|
||||
Steps: funnel.Steps,
|
||||
}
|
||||
|
||||
// Add user email if available
|
||||
if funnel.CreatedByUser != nil {
|
||||
response.UserEmail = funnel.CreatedByUser.Email
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, response)
|
||||
}
|
||||
|
||||
func (handler *handler) Delete(rw http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
funnelID := vars["funnel_id"]
|
||||
|
||||
err := handler.module.Delete(r.Context(), funnelID)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to delete funnel: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, nil)
|
||||
}
|
||||
|
||||
func (handler *handler) Save(rw http.ResponseWriter, r *http.Request) {
|
||||
var req tf.FunnelRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid request: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "unauthenticated"))
|
||||
return
|
||||
}
|
||||
orgID := claims.OrgID
|
||||
usrID := claims.UserID
|
||||
|
||||
funnel, err := handler.module.Get(r.Context(), req.FunnelID.String())
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "funnel not found: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
updateTimestamp := req.Timestamp
|
||||
if updateTimestamp == 0 {
|
||||
updateTimestamp = time.Now().UnixMilli()
|
||||
} else if !tracefunnel.ValidateTimestampIsMilliseconds(updateTimestamp) {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "timestamp must be in milliseconds format (13 digits)"))
|
||||
return
|
||||
}
|
||||
funnel.UpdatedAt = time.Unix(0, updateTimestamp*1000000) // Convert to nanoseconds
|
||||
|
||||
if req.UserID != "" {
|
||||
funnel.UpdatedBy = usrID
|
||||
}
|
||||
|
||||
funnel.Description = req.Description
|
||||
|
||||
if err := handler.module.Save(r.Context(), funnel, funnel.UpdatedBy, orgID); err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to save funnel: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
// Try to fetch metadata from DB
|
||||
createdAt, updatedAt, extraDataFromDB, err := handler.module.GetFunnelMetadata(r.Context(), funnel.ID.String())
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to get funnel metadata: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
resp := tf.FunnelResponse{
|
||||
FunnelName: funnel.Name,
|
||||
CreatedAt: createdAt,
|
||||
UpdatedAt: updatedAt,
|
||||
CreatedBy: funnel.CreatedBy,
|
||||
UpdatedBy: funnel.UpdatedBy,
|
||||
OrgID: funnel.OrgID.String(),
|
||||
Description: extraDataFromDB,
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, resp)
|
||||
}
|
||||
|
||||
//func (handler *handler) ValidateTraces(rw http.ResponseWriter, r *http.Request) {
|
||||
// vars := mux.Vars(r)
|
||||
// funnelID := vars["funnel_id"]
|
||||
//
|
||||
// funnel, err := handler.module.Get(r.Context(), funnelID)
|
||||
// if err != nil {
|
||||
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "funnel not found: %v", err))
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// var timeRange tf.TimeRange
|
||||
// if err := json.NewDecoder(r.Body).Decode(&timeRange); err != nil {
|
||||
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "error decoding time range: %v", err))
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// response, err := handler.module.ValidateTraces(r.Context(), funnel, timeRange)
|
||||
// if err != nil {
|
||||
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "error validating traces: %v", err))
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// render.Success(rw, http.StatusOK, response)
|
||||
//}
|
||||
//
|
||||
//func (handler *handler) FunnelAnalytics(rw http.ResponseWriter, r *http.Request) {
|
||||
// vars := mux.Vars(r)
|
||||
// funnelID := vars["funnel_id"]
|
||||
//
|
||||
// funnel, err := handler.module.Get(r.Context(), funnelID)
|
||||
// if err != nil {
|
||||
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "funnel not found: %v", err))
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// var timeRange tf.TimeRange
|
||||
// if err := json.NewDecoder(r.Body).Decode(&timeRange); err != nil {
|
||||
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "error decoding time range: %v", err))
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// response, err := handler.module.GetFunnelAnalytics(r.Context(), funnel, timeRange)
|
||||
// if err != nil {
|
||||
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "error getting funnel analytics: %v", err))
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// render.Success(rw, http.StatusOK, response)
|
||||
//}
|
||||
//
|
||||
//func (handler *handler) StepAnalytics(rw http.ResponseWriter, r *http.Request) {
|
||||
// vars := mux.Vars(r)
|
||||
// funnelID := vars["funnel_id"]
|
||||
//
|
||||
// funnel, err := handler.module.Get(r.Context(), funnelID)
|
||||
// if err != nil {
|
||||
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "funnel not found: %v", err))
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// var timeRange tf.TimeRange
|
||||
// if err := json.NewDecoder(r.Body).Decode(&timeRange); err != nil {
|
||||
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "error decoding time range: %v", err))
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// response, err := handler.module.GetStepAnalytics(r.Context(), funnel, timeRange)
|
||||
// if err != nil {
|
||||
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "error getting step analytics: %v", err))
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// render.Success(rw, http.StatusOK, response)
|
||||
//}
|
||||
//
|
||||
//func (handler *handler) SlowestTraces(rw http.ResponseWriter, r *http.Request) {
|
||||
// handler.handleTracesWithLatency(rw, r, false)
|
||||
//}
|
||||
//
|
||||
//func (handler *handler) ErrorTraces(rw http.ResponseWriter, r *http.Request) {
|
||||
// handler.handleTracesWithLatency(rw, r, true)
|
||||
//}
|
||||
//
|
||||
//// handleTracesWithLatency handles both slow and error traces with common logic
|
||||
//func (handler *handler) handleTracesWithLatency(rw http.ResponseWriter, r *http.Request, isError bool) {
|
||||
// funnel, req, err := handler.validateTracesRequest(r)
|
||||
// if err != nil {
|
||||
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "%v", err))
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// if err := tracefunnel.ValidateSteps(funnel, req.StepAOrder, req.StepBOrder); err != nil {
|
||||
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "%v", err))
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// response, err := handler.module.GetSlowestTraces(r.Context(), funnel, req.StepAOrder, req.StepBOrder, req.TimeRange, isError)
|
||||
// if err != nil {
|
||||
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "error getting traces: %v", err))
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// render.Success(rw, http.StatusOK, response)
|
||||
//}
|
||||
//
|
||||
//// validateTracesRequest validates and extracts the request parameters
|
||||
//func (handler *handler) validateTracesRequest(r *http.Request) (*tf.Funnel, *tf.StepTransitionRequest, error) {
|
||||
// vars := mux.Vars(r)
|
||||
// funnelID := vars["funnel_id"]
|
||||
//
|
||||
// funnel, err := handler.module.Get(r.Context(), funnelID)
|
||||
// if err != nil {
|
||||
// return nil, nil, fmt.Errorf("funnel not found: %v", err)
|
||||
// }
|
||||
//
|
||||
// var req tf.StepTransitionRequest
|
||||
// if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
// return nil, nil, fmt.Errorf("invalid request body: %v", err)
|
||||
// }
|
||||
//
|
||||
// return funnel, &req, nil
|
||||
//}
|
||||
@@ -1,220 +0,0 @@
|
||||
package impltracefunnel
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
traceFunnels "github.com/SigNoz/signoz/pkg/types/tracefunnel"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
// module implements tracefunnel.Module on top of a TraceFunnelStore.
type module struct {
	// store is the persistence backend all module methods delegate to.
	store traceFunnels.TraceFunnelStore
}

// NewModule wires a TraceFunnelStore into a tracefunnel.Module.
func NewModule(store traceFunnels.TraceFunnelStore) tracefunnel.Module {
	return &module{
		store: store,
	}
}
|
||||
|
||||
func (module *module) Create(ctx context.Context, timestamp int64, name string, userID string, orgID string) (*traceFunnels.Funnel, error) {
|
||||
orgUUID, err := valuer.NewUUID(orgID)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid org ID: %v", err)
|
||||
}
|
||||
|
||||
funnel := &traceFunnels.Funnel{
|
||||
BaseMetadata: traceFunnels.BaseMetadata{
|
||||
Name: name,
|
||||
OrgID: orgUUID,
|
||||
},
|
||||
}
|
||||
funnel.CreatedAt = time.Unix(0, timestamp*1000000) // Convert to nanoseconds
|
||||
funnel.CreatedBy = userID
|
||||
|
||||
// Set up the user relationship
|
||||
funnel.CreatedByUser = &types.User{
|
||||
ID: userID,
|
||||
}
|
||||
|
||||
if err := module.store.Create(ctx, funnel); err != nil {
|
||||
return nil, fmt.Errorf("failed to create funnel: %v", err)
|
||||
}
|
||||
|
||||
return funnel, nil
|
||||
}
|
||||
|
||||
// Get gets a funnel by ID. The ID must be a valid UUID string.
func (module *module) Get(ctx context.Context, funnelID string) (*traceFunnels.Funnel, error) {
	uuid, err := valuer.NewUUID(funnelID)
	if err != nil {
		return nil, fmt.Errorf("invalid funnel ID: %v", err)
	}
	return module.store.Get(ctx, uuid)
}
|
||||
|
||||
// Update records userID as the last editor on the funnel and persists it
// through the store.
func (module *module) Update(ctx context.Context, funnel *traceFunnels.Funnel, userID string) error {
	funnel.UpdatedBy = userID
	return module.store.Update(ctx, funnel)
}
|
||||
|
||||
// List lists all funnels for an organization
|
||||
func (module *module) List(ctx context.Context, orgID string) ([]*traceFunnels.Funnel, error) {
|
||||
orgUUID, err := valuer.NewUUID(orgID)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid org ID: %v", err)
|
||||
}
|
||||
|
||||
funnels, err := module.store.List(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Filter by orgID
|
||||
var orgFunnels []*traceFunnels.Funnel
|
||||
for _, f := range funnels {
|
||||
if f.OrgID == orgUUID {
|
||||
orgFunnels = append(orgFunnels, f)
|
||||
}
|
||||
}
|
||||
|
||||
return orgFunnels, nil
|
||||
}
|
||||
|
||||
// Delete deletes a funnel
|
||||
func (module *module) Delete(ctx context.Context, funnelID string) error {
|
||||
uuid, err := valuer.NewUUID(funnelID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("invalid funnel ID: %v", err)
|
||||
}
|
||||
return module.store.Delete(ctx, uuid)
|
||||
}
|
||||
|
||||
// Save saves a funnel
|
||||
func (module *module) Save(ctx context.Context, funnel *traceFunnels.Funnel, userID string, orgID string) error {
|
||||
orgUUID, err := valuer.NewUUID(orgID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("invalid org ID: %v", err)
|
||||
}
|
||||
|
||||
funnel.UpdatedBy = userID
|
||||
funnel.OrgID = orgUUID
|
||||
return module.store.Update(ctx, funnel)
|
||||
}
|
||||
|
||||
// GetFunnelMetadata gets metadata for a funnel
|
||||
func (module *module) GetFunnelMetadata(ctx context.Context, funnelID string) (int64, int64, string, error) {
|
||||
uuid, err := valuer.NewUUID(funnelID)
|
||||
if err != nil {
|
||||
return 0, 0, "", fmt.Errorf("invalid funnel ID: %v", err)
|
||||
}
|
||||
|
||||
funnel, err := module.store.Get(ctx, uuid)
|
||||
if err != nil {
|
||||
return 0, 0, "", err
|
||||
}
|
||||
|
||||
return funnel.CreatedAt.UnixNano() / 1000000, funnel.UpdatedAt.UnixNano() / 1000000, funnel.Description, nil
|
||||
}
|
||||
|
||||
// ValidateTraces validates traces in a funnel
|
||||
//func (module *module) ValidateTraces(ctx context.Context, funnel *traceFunnels.Funnel, timeRange traceFunnels.TimeRange) ([]*v3.Row, error) {
|
||||
// chq, err := tracefunnel.ValidateTraces(funnel, timeRange)
|
||||
// if err != nil {
|
||||
// RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", err)}, nil)
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// results, err := aH.reader. GetListResultV3(r.Context(), chq.Query)
|
||||
// if err != nil {
|
||||
// RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error converting clickhouse results to list: %v", err)}, nil)
|
||||
// return
|
||||
// }
|
||||
//
|
||||
//}
|
||||
|
||||
// GetFunnelAnalytics gets analytics for a funnel
|
||||
//func (module *module) GetFunnelAnalytics(ctx context.Context, funnel *traceFunnels.Funnel, timeRange traceFunnels.TimeRange) (*traceFunnels.FunnelAnalytics, error) {
|
||||
// if err := tracefunnel.ValidateFunnel(funnel); err != nil {
|
||||
// return nil, fmt.Errorf("invalid funnel: %v", err)
|
||||
// }
|
||||
//
|
||||
// if err := tracefunnel.ValidateTimeRange(timeRange); err != nil {
|
||||
// return nil, fmt.Errorf("invalid time range: %v", err)
|
||||
// }
|
||||
//
|
||||
// _, err := tracefunnel.ValidateTracesWithLatency(funnel, timeRange)
|
||||
// if err != nil {
|
||||
// return nil, fmt.Errorf("error building clickhouse query: %v", err)
|
||||
// }
|
||||
//
|
||||
// // TODO: Execute query and return results
|
||||
// // For now, return empty analytics
|
||||
// return &traceFunnels.FunnelAnalytics{
|
||||
// TotalStart: 0,
|
||||
// TotalComplete: 0,
|
||||
// ErrorCount: 0,
|
||||
// AvgDurationMs: 0,
|
||||
// P99LatencyMs: 0,
|
||||
// ConversionRate: 0,
|
||||
// }, nil
|
||||
//}
|
||||
|
||||
// GetStepAnalytics gets analytics for each step
|
||||
//func (module *module) GetStepAnalytics(ctx context.Context, funnel *traceFunnels.Funnel, timeRange traceFunnels.TimeRange) (*traceFunnels.FunnelAnalytics, error) {
|
||||
// if err := tracefunnel.ValidateFunnel(funnel); err != nil {
|
||||
// return nil, fmt.Errorf("invalid funnel: %v", err)
|
||||
// }
|
||||
//
|
||||
// if err := tracefunnel.ValidateTimeRange(timeRange); err != nil {
|
||||
// return nil, fmt.Errorf("invalid time range: %v", err)
|
||||
// }
|
||||
//
|
||||
// _, err := tracefunnel.GetStepAnalytics(funnel, timeRange)
|
||||
// if err != nil {
|
||||
// return nil, fmt.Errorf("error building clickhouse query: %v", err)
|
||||
// }
|
||||
//
|
||||
// // TODO: Execute query and return results
|
||||
// // For now, return empty analytics
|
||||
// return &traceFunnels.FunnelAnalytics{
|
||||
// TotalStart: 0,
|
||||
// TotalComplete: 0,
|
||||
// ErrorCount: 0,
|
||||
// AvgDurationMs: 0,
|
||||
// P99LatencyMs: 0,
|
||||
// ConversionRate: 0,
|
||||
// }, nil
|
||||
//}
|
||||
|
||||
// GetSlowestTraces gets the slowest traces between two steps
|
||||
//func (module *module) GetSlowestTraces(ctx context.Context, funnel *traceFunnels.Funnel, stepAOrder, stepBOrder int64, timeRange traceFunnels.TimeRange, isError bool) (*traceFunnels.ValidTracesResponse, error) {
|
||||
// if err := tracefunnel.ValidateFunnel(funnel); err != nil {
|
||||
// return nil, fmt.Errorf("invalid funnel: %v", err)
|
||||
// }
|
||||
//
|
||||
// if err := tracefunnel.ValidateTimeRange(timeRange); err != nil {
|
||||
// return nil, fmt.Errorf("invalid time range: %v", err)
|
||||
// }
|
||||
//
|
||||
// _, err := tracefunnel.GetSlowestTraces(funnel, stepAOrder, stepBOrder, timeRange, isError)
|
||||
// if err != nil {
|
||||
// return nil, fmt.Errorf("error building clickhouse query: %v", err)
|
||||
// }
|
||||
//
|
||||
// // TODO: Execute query and return results
|
||||
// // For now, return empty response
|
||||
// return &traceFunnels.ValidTracesResponse{
|
||||
// TraceIDs: []string{},
|
||||
// }, nil
|
||||
//}
|
||||
|
||||
//UpdateMetadata updates the metadata of a funnel
|
||||
//func (module *module) UpdateMetadata(ctx context.Context, funnelID valuer.UUID, name, description string, userID string) error {
|
||||
// return module.store.UpdateMetadata(ctx, funnelID, name, description, userID)
|
||||
//}
|
||||
@@ -1,220 +0,0 @@
|
||||
package impltracefunnel
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
traceFunnels "github.com/SigNoz/signoz/pkg/types/tracefunnel"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
// store implements traceFunnels.TraceFunnelStore backed by a bun SQL database.
type store struct {
	// sqlstore provides the bun DB handle used by all queries.
	sqlstore sqlstore.SQLStore
}

// NewStore wraps a SQLStore in a TraceFunnelStore implementation.
func NewStore(sqlstore sqlstore.SQLStore) traceFunnels.TraceFunnelStore {
	return &store{sqlstore: sqlstore}
}
|
||||
|
||||
func (store *store) Create(ctx context.Context, funnel *traceFunnels.Funnel) error {
|
||||
if funnel.ID.IsZero() {
|
||||
funnel.ID = valuer.GenerateUUID()
|
||||
}
|
||||
|
||||
if funnel.CreatedAt.IsZero() {
|
||||
funnel.CreatedAt = time.Now()
|
||||
}
|
||||
if funnel.UpdatedAt.IsZero() {
|
||||
funnel.UpdatedAt = time.Now()
|
||||
}
|
||||
|
||||
_, err := store.
|
||||
sqlstore.
|
||||
BunDB().
|
||||
NewInsert().
|
||||
Model(funnel).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create funnel: %v", err)
|
||||
}
|
||||
|
||||
if funnel.CreatedByUser != nil {
|
||||
_, err = store.sqlstore.BunDB().NewUpdate().
|
||||
Model(funnel).
|
||||
Set("created_by = ?", funnel.CreatedByUser.ID).
|
||||
Where("id = ?", funnel.ID).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to update funnel user relationship: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Get retrieves a funnel by ID
|
||||
func (store *store) Get(ctx context.Context, uuid valuer.UUID) (*traceFunnels.Funnel, error) {
|
||||
funnel := &traceFunnels.Funnel{}
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(funnel).
|
||||
Relation("CreatedByUser").
|
||||
Where("?TableAlias.id = ?", uuid).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get funnel: %v", err)
|
||||
}
|
||||
return funnel, nil
|
||||
}
|
||||
|
||||
// Update updates an existing funnel
|
||||
func (store *store) Update(ctx context.Context, funnel *traceFunnels.Funnel) error {
|
||||
// UpdateSteps the updated_at timestamp
|
||||
funnel.UpdatedAt = time.Now()
|
||||
|
||||
_, err := store.
|
||||
sqlstore.
|
||||
BunDB().
|
||||
NewUpdate().
|
||||
Model(funnel).
|
||||
WherePK().
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to update funnel: %v", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// List retrieves all funnels
|
||||
func (store *store) List(ctx context.Context) ([]*traceFunnels.Funnel, error) {
|
||||
var funnels []*traceFunnels.Funnel
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(&funnels).
|
||||
Relation("CreatedByUser").
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to list funnels: %v", err)
|
||||
}
|
||||
return funnels, nil
|
||||
}
|
||||
|
||||
// Delete removes a funnel by ID
|
||||
func (store *store) Delete(ctx context.Context, uuid valuer.UUID) error {
|
||||
_, err := store.
|
||||
sqlstore.
|
||||
BunDB().
|
||||
NewDelete().
|
||||
Model((*traceFunnels.Funnel)(nil)).
|
||||
Where("id = ?", uuid).Exec(ctx)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to delete funnel: %v", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ListByOrg retrieves all funnels for a specific organization
|
||||
//func (store *store) ListByOrg(ctx context.Context, orgID valuer.UUID) ([]*traceFunnels.Funnel, error) {
|
||||
// var funnels []*traceFunnels.Funnel
|
||||
// err := store.
|
||||
// sqlstore.
|
||||
// BunDB().
|
||||
// NewSelect().
|
||||
// Model(&funnels).
|
||||
// Relation("CreatedByUser").
|
||||
// Where("org_id = ?", orgID).
|
||||
// Scan(ctx)
|
||||
// if err != nil {
|
||||
// return nil, fmt.Errorf("failed to list funnels by org: %v", err)
|
||||
// }
|
||||
// return funnels, nil
|
||||
//}
|
||||
|
||||
// GetByIDAndOrg retrieves a funnel by ID and organization ID
|
||||
//func (store *store) GetByIDAndOrg(ctx context.Context, id, orgID valuer.UUID) (*traceFunnels.Funnel, error) {
|
||||
// funnel := &traceFunnels.Funnel{}
|
||||
// err := store.
|
||||
// sqlstore.
|
||||
// BunDB().
|
||||
// NewSelect().
|
||||
// Model(funnel).
|
||||
// Relation("CreatedByUser").
|
||||
// Where("?TableAlias.id = ? AND ?TableAlias.org_id = ?", id, orgID).
|
||||
// Scan(ctx)
|
||||
// if err != nil {
|
||||
// return nil, fmt.Errorf("failed to get funnel by ID and org: %v", err)
|
||||
// }
|
||||
// return funnel, nil
|
||||
//}
|
||||
|
||||
// UpdateSteps updates the steps of a funnel
|
||||
//func (store *store) UpdateSteps(ctx context.Context, funnelID valuer.UUID, steps []traceFunnels.FunnelStep) error {
|
||||
// _, err := store.
|
||||
// sqlstore.
|
||||
// BunDB().
|
||||
// NewUpdate().
|
||||
// Model((*traceFunnels.Funnel)(nil)).
|
||||
// Set("steps = ?", steps).
|
||||
// Where("id = ?", funnelID).
|
||||
// Exec(ctx)
|
||||
// if err != nil {
|
||||
// return fmt.Errorf("failed to update funnel steps: %v", err)
|
||||
// }
|
||||
// return nil
|
||||
//}
|
||||
|
||||
// UpdateMetadata updates the metadata of a funnel
|
||||
//func (store *store) UpdateMetadata(ctx context.Context, funnelID valuer.UUID, name, description string, userID string) error {
|
||||
//
|
||||
// // First get the current funnel to preserve other fields
|
||||
// funnel := &traceFunnels.Funnel{}
|
||||
// err := store.
|
||||
// sqlstore.
|
||||
// BunDB().
|
||||
// NewSelect().
|
||||
// Model(funnel).
|
||||
// Where("id = ?", funnelID).
|
||||
// Scan(ctx)
|
||||
// if err != nil {
|
||||
// return fmt.Errorf("failed to get funnel: %v", err)
|
||||
// }
|
||||
//
|
||||
// // UpdateSteps the fields
|
||||
// funnel.Name = name
|
||||
// funnel.Description = description
|
||||
// funnel.UpdatedAt = time.Now()
|
||||
// funnel.UpdatedBy = userID
|
||||
//
|
||||
// // Save the updated funnel
|
||||
// _, err = store.
|
||||
// sqlstore.
|
||||
// BunDB().
|
||||
// NewUpdate().
|
||||
// Model(funnel).
|
||||
// WherePK().
|
||||
// Exec(ctx)
|
||||
// if err != nil {
|
||||
// return fmt.Errorf("failed to update funnel metadata: %v", err)
|
||||
// }
|
||||
//
|
||||
// // Verify the update
|
||||
// updatedFunnel := &traceFunnels.Funnel{}
|
||||
// err = store.
|
||||
// sqlstore.
|
||||
// BunDB().
|
||||
// NewSelect().
|
||||
// Model(updatedFunnel).
|
||||
// Where("id = ?", funnelID).
|
||||
// Scan(ctx)
|
||||
// if err != nil {
|
||||
// return fmt.Errorf("failed to verify update: %v", err)
|
||||
// }
|
||||
//
|
||||
// return nil
|
||||
//}
|
||||
@@ -1,442 +0,0 @@
|
||||
package tracefunnel
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
tracefunnel "github.com/SigNoz/signoz/pkg/types/tracefunnel"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// GetSlowestTraces builds a ClickHouse query to get the slowest traces between two steps.
//
// The steps are located in funnel.Steps by their Order values (stepAOrder
// and stepBOrder). The generated query self-joins the traces table on
// trace_id, spans the duration from step A's earliest start to step B's
// latest end, and returns the five longest traces with their span counts.
// When withErrors is true, only traces where either step's span had
// has_error are kept. Service/span names are passed through escapeString
// before interpolation; time bounds come in via %d formatting.
func GetSlowestTraces(funnel *tracefunnel.Funnel, stepAOrder, stepBOrder int64, timeRange tracefunnel.TimeRange, withErrors bool) (*v3.ClickHouseQuery, error) {
	// Find steps by order
	var stepA, stepB *tracefunnel.FunnelStep
	for i := range funnel.Steps {
		if funnel.Steps[i].Order == stepAOrder {
			stepA = &funnel.Steps[i]
		}
		if funnel.Steps[i].Order == stepBOrder {
			stepB = &funnel.Steps[i]
		}
	}

	if stepA == nil || stepB == nil {
		return nil, fmt.Errorf("step not found")
	}

	// Build having clause based on withErrors flag
	havingClause := ""
	if withErrors {
		havingClause = "HAVING has_error = 1"
	}

	// Build filter strings for each step. Currently placeholders only: the
	// interpolated text is an SQL comment, so filters have no effect yet.
	stepAFilters := ""
	if stepA.Filters != nil && len(stepA.Filters.Items) > 0 {
		// ToDO: need to implement where clause filtering with minimal code duplication
		stepAFilters = "/* Custom filters for step A would be applied here */"
	}

	stepBFilters := ""
	if stepB.Filters != nil && len(stepB.Filters.Items) > 0 {
		// ToDO: need to implement where clause filtering with minimal code duplication
		stepBFilters = "/* Custom filters for step B would be applied here */"
	}

	// tsBucketStart is pushed back 1800s (30 min) so spans whose bucket
	// began before the window are still scanned.
	query := fmt.Sprintf(`
WITH
toUInt64(%d) AS start_time,
toUInt64(%d) AS end_time,
toString(intDiv(start_time, 1000000000) - 1800) AS tsBucketStart,
toString(intDiv(end_time, 1000000000)) AS tsBucketEnd
SELECT
trace_id,
concat(toString((max_end_time_ns - min_start_time_ns) / 1e6), ' ms') AS duration_ms,
COUNT(*) AS span_count
FROM (
SELECT
s1.trace_id,
MIN(toUnixTimestamp64Nano(s1.timestamp)) AS min_start_time_ns,
MAX(toUnixTimestamp64Nano(s2.timestamp) + s2.duration_nano) AS max_end_time_ns,
MAX(s1.has_error OR s2.has_error) AS has_error
FROM %s AS s1
JOIN %s AS s2
ON s1.trace_id = s2.trace_id
WHERE s1.resource_string_service$$name = '%s'
AND s1.name = '%s'
AND s2.resource_string_service$$name = '%s'
AND s2.name = '%s'
AND s1.timestamp BETWEEN toString(start_time) AND toString(end_time)
AND s1.ts_bucket_start BETWEEN tsBucketStart AND tsBucketEnd
AND s2.timestamp BETWEEN toString(start_time) AND toString(end_time)
AND s2.ts_bucket_start BETWEEN tsBucketStart AND tsBucketEnd
%s
%s
GROUP BY s1.trace_id
%s
) AS trace_durations
JOIN %s AS spans
ON spans.trace_id = trace_durations.trace_id
WHERE spans.timestamp BETWEEN toString(start_time) AND toString(end_time)
AND spans.ts_bucket_start BETWEEN tsBucketStart AND tsBucketEnd
GROUP BY trace_id, duration_ms
ORDER BY CAST(replaceRegexpAll(duration_ms, ' ms$', '') AS Float64) DESC
LIMIT 5`,
		timeRange.StartTime,
		timeRange.EndTime,
		TracesTable,
		TracesTable,
		escapeString(stepA.ServiceName),
		escapeString(stepA.SpanName),
		escapeString(stepB.ServiceName),
		escapeString(stepB.SpanName),
		stepAFilters,
		stepBFilters,
		havingClause,
		TracesTable,
	)

	return &v3.ClickHouseQuery{
		Query: query,
	}, nil
}
|
||||
|
||||
// GetStepAnalytics builds a ClickHouse query to get analytics for each step.
//
// For every step in the funnel the query produces two aggregates over the
// traces that pass through ALL steps (computed via an INTERSECT of
// per-step trace-id CTEs): total_sN_spans and total_sN_errored_spans.
// Returns an error when the funnel has no steps. Service/span names are
// escaped with escapeString before interpolation.
func GetStepAnalytics(funnel *tracefunnel.Funnel, timeRange tracefunnel.TimeRange) (*v3.ClickHouseQuery, error) {
	if len(funnel.Steps) == 0 {
		return nil, fmt.Errorf("funnel has no steps")
	}

	// Build funnel steps array of (service, span) tuples; interpolated
	// into the WITH clause as funnel_steps.
	var steps []string
	for _, step := range funnel.Steps {
		steps = append(steps, fmt.Sprintf("('%s', '%s')",
			escapeString(step.ServiceName), escapeString(step.SpanName)))
	}
	stepsArray := fmt.Sprintf("array(%s)", strings.Join(steps, ","))

	// Build step CTEs: one stepN_traces CTE per funnel step selecting the
	// distinct trace ids that hit that step's service/span in range.
	var stepCTEs []string
	for i, step := range funnel.Steps {
		filterStr := ""
		if step.Filters != nil && len(step.Filters.Items) > 0 {
			// ToDO: need to implement where clause filtering with minimal code duplication
			filterStr = "/* Custom filters would be applied here */"
		}

		cte := fmt.Sprintf(`
step%d_traces AS (
SELECT DISTINCT trace_id
FROM %s
WHERE resource_string_service$$name = '%s'
AND name = '%s'
AND timestamp BETWEEN toString(start_time) AND toString(end_time)
AND ts_bucket_start BETWEEN tsBucketStart AND tsBucketEnd
%s
)`,
			i+1,
			TracesTable,
			escapeString(step.ServiceName),
			escapeString(step.SpanName),
			filterStr,
		)
		stepCTEs = append(stepCTEs, cte)
	}

	// Build intersecting traces CTE: trace ids present in every step CTE.
	var intersections []string
	for i := 1; i <= len(funnel.Steps); i++ {
		intersections = append(intersections, fmt.Sprintf("SELECT trace_id FROM step%d_traces", i))
	}
	intersectingTracesCTE := fmt.Sprintf(`
intersecting_traces AS (
%s
)`,
		strings.Join(intersections, "\nINTERSECT\n"),
	)

	// Build CASE expressions for each step: a total-span count and an
	// errored-span count, aliased by the step's 1-based position.
	var caseExpressions []string
	for i, step := range funnel.Steps {
		totalSpansExpr := fmt.Sprintf(`
COUNT(CASE WHEN resource_string_service$$name = '%s'
AND name = '%s'
THEN trace_id END) AS total_s%d_spans`,
			escapeString(step.ServiceName), escapeString(step.SpanName), i+1)

		erroredSpansExpr := fmt.Sprintf(`
COUNT(CASE WHEN resource_string_service$$name = '%s'
AND name = '%s'
AND has_error = true
THEN trace_id END) AS total_s%d_errored_spans`,
			escapeString(step.ServiceName), escapeString(step.SpanName), i+1)

		caseExpressions = append(caseExpressions, totalSpansExpr, erroredSpansExpr)
	}

	// tsBucketStart is pushed back 1800s (30 min) so spans whose bucket
	// began before the window are still scanned.
	query := fmt.Sprintf(`
WITH
toUInt64(%d) AS start_time,
toUInt64(%d) AS end_time,
toString(intDiv(start_time, 1000000000) - 1800) AS tsBucketStart,
toString(intDiv(end_time, 1000000000)) AS tsBucketEnd,
%s AS funnel_steps,
%s,
%s
SELECT
%s
FROM %s
WHERE trace_id IN (SELECT trace_id FROM intersecting_traces)
AND timestamp BETWEEN toString(start_time) AND toString(end_time)
AND ts_bucket_start BETWEEN tsBucketStart AND tsBucketEnd`,
		timeRange.StartTime,
		timeRange.EndTime,
		stepsArray,
		strings.Join(stepCTEs, ",\n"),
		intersectingTracesCTE,
		strings.Join(caseExpressions, ",\n "),
		TracesTable,
	)

	return &v3.ClickHouseQuery{
		Query: query,
	}, nil
}
|
||||
|
||||
// ValidateTracesWithLatency builds a ClickHouse query to validate traces with latency information
|
||||
func ValidateTracesWithLatency(funnel *tracefunnel.Funnel, timeRange tracefunnel.TimeRange) (*v3.ClickHouseQuery, error) {
|
||||
filters, err := buildFunnelFiltersWithLatency(funnel)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error building funnel filters with latency: %w", err)
|
||||
}
|
||||
|
||||
query := generateFunnelSQLWithLatency(timeRange.StartTime, timeRange.EndTime, filters)
|
||||
|
||||
return &v3.ClickHouseQuery{
|
||||
Query: query,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// generateFunnelSQLWithLatency builds a single ClickHouse query that, for the
// given step filters and [start, end] window (nanosecond Unix timestamps),
// returns per-step distinct-trace counts, average completion rate, error
// count, average and p99 trace duration, and the overall conversion rate.
func generateFunnelSQLWithLatency(start, end int64, filters []tracefunnel.FunnelStepFilter) string {
	var expressions []string

	// Convert timestamps to nanoseconds
	startTime := fmt.Sprintf("toUInt64(%d)", start)
	endTime := fmt.Sprintf("toUInt64(%d)", end)

	expressions = append(expressions, fmt.Sprintf("%s AS start_time", startTime))
	expressions = append(expressions, fmt.Sprintf("%s AS end_time", endTime))
	// ts_bucket_start is in seconds; widen the lower bound by 1800 s so spans
	// bucketed shortly before the window start are still scanned.
	expressions = append(expressions, "toString(intDiv(start_time, 1000000000) - 1800) AS tsBucketStart")
	expressions = append(expressions, "toString(intDiv(end_time, 1000000000)) AS tsBucketEnd")
	expressions = append(expressions, "(end_time - start_time) / 1e9 AS total_time_seconds")

	// Define step configurations dynamically: sN_config = (service, span) tuple.
	for _, f := range filters {
		expressions = append(expressions, fmt.Sprintf("('%s', '%s') AS s%d_config",
			escapeString(f.ServiceName),
			escapeString(f.SpanName),
			f.StepNumber))
	}

	withClause := "WITH \n" + strings.Join(expressions, ",\n") + "\n"

	// Build step raw expressions and cumulative logic
	var stepRaws []string
	var cumulativeLogic []string
	var filterConditions []string

	stepCount := len(filters)

	// Build raw step detection: has_sN_raw is 1 when the trace contains at
	// least one span matching step N (independent of other steps).
	for i := 1; i <= stepCount; i++ {
		stepRaws = append(stepRaws, fmt.Sprintf(
			"MAX(CASE WHEN (resource_string_service$$name, name) = s%d_config THEN 1 ELSE 0 END) AS has_s%d_raw", i, i))
		filterConditions = append(filterConditions, fmt.Sprintf("s%d_config", i))
	}

	// Build cumulative IF logic: has_sN is 1 only when every earlier step
	// also matched, making per-step counts monotonically non-increasing.
	for i := 1; i <= stepCount; i++ {
		if i == 1 {
			cumulativeLogic = append(cumulativeLogic, fmt.Sprintf(`
IF(MAX(CASE WHEN (resource_string_service$$name, name) = s1_config THEN 1 ELSE 0 END) = 1, 1, 0) AS has_s1`))
		} else {
			// Start from the step-1 indicator, nest one IF per intermediate
			// step 2..i-1, then wrap once more with step i itself.
			innerIf := "IF(MAX(CASE WHEN (resource_string_service$$name, name) = s1_config THEN 1 ELSE 0 END) = 1, 1, 0)"
			for j := 2; j < i; j++ {
				innerIf = fmt.Sprintf(`IF(%s = 1 AND MAX(CASE WHEN (resource_string_service$$name, name) = s%d_config THEN 1 ELSE 0 END) = 1, 1, 0)`, innerIf, j)
			}
			cumulativeLogic = append(cumulativeLogic, fmt.Sprintf(`
IF(
    %s = 1 AND MAX(CASE WHEN (resource_string_service$$name, name) = s%d_config THEN 1 ELSE 0 END) = 1,
    1, 0
) AS has_s%d`, innerIf, i, i))
		}
	}

	// Final SELECT counts using FILTER clauses
	var stepCounts []string
	for i := 1; i <= stepCount; i++ {
		stepCounts = append(stepCounts, fmt.Sprintf("COUNT(DISTINCT trace_id) FILTER (WHERE has_s%d = 1) AS step%d_count", i, i))
	}

	// Final query assembly: the inner subquery groups spans by trace and
	// derives the per-trace step indicators and duration; the outer query
	// aggregates across traces.
	lastStep := fmt.Sprint(stepCount)
	query := withClause + `
SELECT
    ` + strings.Join(stepCounts, ",\n    ") + `,

    IF(total_time_seconds = 0 OR COUNT(DISTINCT trace_id) FILTER (WHERE has_s` + lastStep + ` = 1) = 0, 0,
        COUNT(DISTINCT trace_id) FILTER (WHERE has_s` + lastStep + ` = 1) / total_time_seconds
    ) AS avg_rate,

    COUNT(DISTINCT trace_id) FILTER (WHERE has_s` + lastStep + ` = 1 AND has_error = true) AS errors,

    IF(COUNT(*) = 0, 0, avg(trace_duration)) AS avg_duration,

    IF(COUNT(*) = 0, 0, quantile(0.99)(trace_duration)) AS p99_latency,

    IF(COUNT(DISTINCT trace_id) FILTER (WHERE has_s1 = 1) = 0, 0,
        100.0 * COUNT(DISTINCT trace_id) FILTER (WHERE has_s` + lastStep + ` = 1) /
        COUNT(DISTINCT trace_id) FILTER (WHERE has_s1 = 1)
    ) AS conversion_rate

FROM (
    SELECT
        trace_id,
        MAX(has_error) AS has_error,
        ` + strings.Join(stepRaws, ",\n        ") + `,
        MAX(toUnixTimestamp64Nano(timestamp) + duration_nano) - MIN(toUnixTimestamp64Nano(timestamp)) AS trace_duration,
        ` + strings.Join(cumulativeLogic, ",\n        ") + `
    FROM ` + TracesTable + `
    WHERE
        timestamp BETWEEN toString(start_time) AND toString(end_time)
        AND ts_bucket_start BETWEEN tsBucketStart AND tsBucketEnd
        AND (resource_string_service$$name, name) IN (` + strings.Join(filterConditions, ", ") + `)
    GROUP BY trace_id
) AS funnel_data;`

	return query
}
|
||||
|
||||
func buildFunnelFiltersWithLatency(funnel *tracefunnel.Funnel) ([]tracefunnel.FunnelStepFilter, error) {
|
||||
if funnel == nil {
|
||||
return nil, fmt.Errorf("funnel cannot be nil")
|
||||
}
|
||||
|
||||
if len(funnel.Steps) == 0 {
|
||||
return nil, fmt.Errorf("funnel must have at least one step")
|
||||
}
|
||||
|
||||
filters := make([]tracefunnel.FunnelStepFilter, len(funnel.Steps))
|
||||
|
||||
for i, step := range funnel.Steps {
|
||||
latencyPointer := "start" // Default value
|
||||
if step.LatencyPointer != "" {
|
||||
latencyPointer = step.LatencyPointer
|
||||
}
|
||||
|
||||
filters[i] = tracefunnel.FunnelStepFilter{
|
||||
StepNumber: i + 1,
|
||||
ServiceName: step.ServiceName,
|
||||
SpanName: step.SpanName,
|
||||
LatencyPointer: latencyPointer,
|
||||
CustomFilters: step.Filters,
|
||||
}
|
||||
}
|
||||
|
||||
return filters, nil
|
||||
}
|
||||
|
||||
func buildFunnelFilters(funnel *tracefunnel.Funnel) ([]tracefunnel.FunnelStepFilter, error) {
|
||||
if funnel == nil {
|
||||
return nil, fmt.Errorf("funnel cannot be nil")
|
||||
}
|
||||
|
||||
if len(funnel.Steps) == 0 {
|
||||
return nil, fmt.Errorf("funnel must have at least one step")
|
||||
}
|
||||
|
||||
filters := make([]tracefunnel.FunnelStepFilter, len(funnel.Steps))
|
||||
|
||||
for i, step := range funnel.Steps {
|
||||
filters[i] = tracefunnel.FunnelStepFilter{
|
||||
StepNumber: i + 1,
|
||||
ServiceName: step.ServiceName,
|
||||
SpanName: step.SpanName,
|
||||
CustomFilters: step.Filters,
|
||||
}
|
||||
}
|
||||
|
||||
return filters, nil
|
||||
}
|
||||
|
||||
// escapeString doubles every single quote in s so the value can be embedded
// safely inside a single-quoted SQL string literal.
func escapeString(s string) string {
	var b strings.Builder
	b.Grow(len(s))
	for _, r := range s {
		if r == '\'' {
			b.WriteString("''")
			continue
		}
		b.WriteRune(r)
	}
	return b.String()
}
|
||||
|
||||
// TracesTable is the fully qualified ClickHouse table holding the v3 span index.
const TracesTable = "signoz_traces.signoz_index_v3"
|
||||
|
||||
func generateFunnelSQL(start, end int64, filters []tracefunnel.FunnelStepFilter) string {
|
||||
var expressions []string
|
||||
|
||||
// Basic time expressions.
|
||||
expressions = append(expressions, fmt.Sprintf("toUInt64(%d) AS start_time", start))
|
||||
expressions = append(expressions, fmt.Sprintf("toUInt64(%d) AS end_time", end))
|
||||
expressions = append(expressions, "toString(intDiv(start_time, 1000000000) - 1800) AS tsBucketStart")
|
||||
expressions = append(expressions, "toString(intDiv(end_time, 1000000000)) AS tsBucketEnd")
|
||||
|
||||
// Add service and span alias definitions from each filter.
|
||||
for _, f := range filters {
|
||||
expressions = append(expressions, fmt.Sprintf("'%s' AS service_%d", escapeString(f.ServiceName), f.StepNumber))
|
||||
expressions = append(expressions, fmt.Sprintf("'%s' AS span_%d", escapeString(f.SpanName), f.StepNumber))
|
||||
}
|
||||
|
||||
// Add the CTE for each step.
|
||||
for _, f := range filters {
|
||||
cte := fmt.Sprintf(`step%d_traces AS (
|
||||
SELECT DISTINCT trace_id
|
||||
FROM %s
|
||||
WHERE serviceName = service_%d
|
||||
AND name = span_%d
|
||||
AND timestamp BETWEEN toString(start_time) AND toString(end_time)
|
||||
AND ts_bucket_start BETWEEN tsBucketStart AND tsBucketEnd
|
||||
)`, f.StepNumber, TracesTable, f.StepNumber, f.StepNumber)
|
||||
expressions = append(expressions, cte)
|
||||
}
|
||||
|
||||
withClause := "WITH \n" + strings.Join(expressions, ",\n") + "\n"
|
||||
|
||||
// Build the intersect clause for each step.
|
||||
var intersectQueries []string
|
||||
for _, f := range filters {
|
||||
intersectQueries = append(intersectQueries, fmt.Sprintf("SELECT trace_id FROM step%d_traces", f.StepNumber))
|
||||
}
|
||||
intersectClause := strings.Join(intersectQueries, "\nINTERSECT\n")
|
||||
|
||||
query := withClause + `
|
||||
SELECT trace_id
|
||||
FROM ` + TracesTable + `
|
||||
WHERE trace_id IN (
|
||||
` + intersectClause + `
|
||||
)
|
||||
AND timestamp BETWEEN toString(start_time) AND toString(end_time)
|
||||
AND ts_bucket_start BETWEEN tsBucketStart AND tsBucketEnd
|
||||
GROUP BY trace_id
|
||||
LIMIT 5
|
||||
`
|
||||
return query
|
||||
}
|
||||
|
||||
// ValidateTraces builds a ClickHouse query to validate traces in a funnel
|
||||
func ValidateTraces(funnel *tracefunnel.Funnel, timeRange tracefunnel.TimeRange) (*v3.ClickHouseQuery, error) {
|
||||
filters, err := buildFunnelFilters(funnel)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error building funnel filters: %w", err)
|
||||
}
|
||||
|
||||
query := generateFunnelSQL(timeRange.StartTime, timeRange.EndTime, filters)
|
||||
|
||||
return &v3.ClickHouseQuery{
|
||||
Query: query,
|
||||
}, nil
|
||||
}
|
||||
@@ -1,67 +0,0 @@
|
||||
package tracefunnel
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
traceFunnels "github.com/SigNoz/signoz/pkg/types/tracefunnel"
|
||||
)
|
||||
|
||||
// Module defines the interface for trace funnel operations.
type Module interface {
	// Create makes a new funnel named name for userID in orgID, stamped with timestamp.
	Create(ctx context.Context, timestamp int64, name string, userID string, orgID string) (*traceFunnels.Funnel, error)

	// Get returns the funnel identified by funnelID.
	Get(ctx context.Context, funnelID string) (*traceFunnels.Funnel, error)

	// Update persists changes to an existing funnel on behalf of userID.
	Update(ctx context.Context, funnel *traceFunnels.Funnel, userID string) error

	// List returns all funnels belonging to orgID.
	List(ctx context.Context, orgID string) ([]*traceFunnels.Funnel, error)

	// Delete removes the funnel identified by funnelID.
	Delete(ctx context.Context, funnelID string) error

	// Save stores the funnel for userID in orgID.
	Save(ctx context.Context, funnel *traceFunnels.Funnel, userID string, orgID string) error

	// GetFunnelMetadata returns funnel metadata; from the signature this is
	// two int64 values (presumably created/updated timestamps) and a string —
	// TODO confirm against the implementation.
	GetFunnelMetadata(ctx context.Context, funnelID string) (int64, int64, string, error)
	//
	//GetFunnelAnalytics(ctx context.Context, funnel *traceFunnels.Funnel, timeRange traceFunnels.TimeRange) (*traceFunnels.FunnelAnalytics, error)
	//
	//GetStepAnalytics(ctx context.Context, funnel *traceFunnels.Funnel, timeRange traceFunnels.TimeRange) (*traceFunnels.FunnelAnalytics, error)
	//
	//GetSlowestTraces(ctx context.Context, funnel *traceFunnels.Funnel, stepAOrder, stepBOrder int64, timeRange traceFunnels.TimeRange, isError bool) (*traceFunnels.ValidTracesResponse, error)

	// updates funnel metadata
	//UpdateMetadata(ctx context.Context, funnelID valuer.UUID, name, description string, userID string) error

	// validates funnel
	//ValidateTraces(ctx context.Context, funnel *traceFunnels.Funnel, timeRange traceFunnels.TimeRange) ([]*v3.Row, error)
}
|
||||
|
||||
// Handler defines the HTTP handlers exposed for trace funnel CRUD operations.
type Handler interface {
	// New creates a funnel from the request body.
	New(http.ResponseWriter, *http.Request)

	// UpdateSteps replaces the steps of an existing funnel.
	UpdateSteps(http.ResponseWriter, *http.Request)

	// UpdateFunnel updates an existing funnel.
	UpdateFunnel(http.ResponseWriter, *http.Request)

	// List returns the funnels visible to the caller.
	List(http.ResponseWriter, *http.Request)

	// Get returns a single funnel.
	Get(http.ResponseWriter, *http.Request)

	// Delete removes a funnel.
	Delete(http.ResponseWriter, *http.Request)

	// Save persists a funnel.
	Save(http.ResponseWriter, *http.Request)

	// validator handlers
	//ValidateTraces(http.ResponseWriter, *http.Request)
	//
	//// Analytics handlers
	//FunnelAnalytics(http.ResponseWriter, *http.Request)
	//
	//StepAnalytics(http.ResponseWriter, *http.Request)
	//
	//SlowestTraces(http.ResponseWriter, *http.Request)
	//
	//ErrorTraces(http.ResponseWriter, *http.Request)
}
|
||||
@@ -1,171 +0,0 @@
|
||||
package tracefunnel
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
tracefunnel "github.com/SigNoz/signoz/pkg/types/tracefunnel"
|
||||
"sort"
|
||||
)
|
||||
|
||||
// ValidateTimestamp validates a timestamp, naming the offending field in the
// returned error. Zero is treated as "missing"; negative values are rejected.
func ValidateTimestamp(timestamp int64, fieldName string) error {
	switch {
	case timestamp == 0:
		return fmt.Errorf("%s is required", fieldName)
	case timestamp < 0:
		return fmt.Errorf("%s must be positive", fieldName)
	default:
		return nil
	}
}
|
||||
|
||||
// ValidateTimestampIsMilliseconds reports whether timestamp looks like a Unix
// time in milliseconds, i.e. it has exactly 13 decimal digits.
func ValidateTimestampIsMilliseconds(timestamp int64) bool {
	const (
		minMillis int64 = 1000000000000 // smallest 13-digit value
		maxMillis int64 = 9999999999999 // largest 13-digit value
	)
	return minMillis <= timestamp && timestamp <= maxMillis
}
|
||||
|
||||
// ValidateFunnelSteps validates funnel steps
|
||||
func ValidateFunnelSteps(steps []tracefunnel.FunnelStep) error {
|
||||
if len(steps) < 2 {
|
||||
return fmt.Errorf("funnel must have at least 2 steps")
|
||||
}
|
||||
|
||||
for i, step := range steps {
|
||||
if step.ServiceName == "" {
|
||||
return fmt.Errorf("step %d: service name is required", i+1)
|
||||
}
|
||||
if step.SpanName == "" {
|
||||
return fmt.Errorf("step %d: span name is required", i+1)
|
||||
}
|
||||
if step.Order < 0 {
|
||||
return fmt.Errorf("step %d: order must be non-negative", i+1)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// NormalizeFunnelSteps normalizes step orders to be sequential
|
||||
func NormalizeFunnelSteps(steps []tracefunnel.FunnelStep) []tracefunnel.FunnelStep {
|
||||
// Sort steps by order
|
||||
sort.Slice(steps, func(i, j int) bool {
|
||||
return steps[i].Order < steps[j].Order
|
||||
})
|
||||
|
||||
// Normalize orders to be sequential
|
||||
for i := range steps {
|
||||
steps[i].Order = int64(i + 1)
|
||||
}
|
||||
|
||||
return steps
|
||||
}
|
||||
|
||||
//// ValidateSteps checks if the requested steps exist in the funnel
|
||||
//func ValidateSteps(funnel *tracefunnel.Funnel, stepAOrder, stepBOrder int64) error {
|
||||
// stepAExists, stepBExists := false, false
|
||||
// for _, step := range funnel.Steps {
|
||||
// if step.Order == stepAOrder {
|
||||
// stepAExists = true
|
||||
// }
|
||||
// if step.Order == stepBOrder {
|
||||
// stepBExists = true
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// if !stepAExists || !stepBExists {
|
||||
// return fmt.Errorf("one or both steps not found. Step A Order: %d, Step B Order: %d", stepAOrder, stepBOrder)
|
||||
// }
|
||||
//
|
||||
// return nil
|
||||
//}
|
||||
|
||||
//// ValidateFunnel validates a funnel's data
|
||||
//func ValidateFunnel(funnel *tracefunnel.Funnel) error {
|
||||
// if funnel == nil {
|
||||
// return fmt.Errorf("funnel cannot be nil")
|
||||
// }
|
||||
//
|
||||
// if len(funnel.Steps) < 2 {
|
||||
// return fmt.Errorf("funnel must have at least 2 steps")
|
||||
// }
|
||||
//
|
||||
// // Validate each step
|
||||
// for i, step := range funnel.Steps {
|
||||
// if err := ValidateStep(step, i+1); err != nil {
|
||||
// return err
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// return nil
|
||||
//}
|
||||
|
||||
// ValidateStep validates a single funnel step
|
||||
//func ValidateStep(step tracefunnel.FunnelStep, stepNum int) error {
|
||||
// if step.ServiceName == "" {
|
||||
// return fmt.Errorf("step %d: service name is required", stepNum)
|
||||
// }
|
||||
//
|
||||
// if step.SpanName == "" {
|
||||
// return fmt.Errorf("step %d: span name is required", stepNum)
|
||||
// }
|
||||
//
|
||||
// if step.Order < 0 {
|
||||
// return fmt.Errorf("step %d: order must be non-negative", stepNum)
|
||||
// }
|
||||
//
|
||||
// return nil
|
||||
//}
|
||||
//
|
||||
//// ValidateTimeRange validates a time range
|
||||
//func ValidateTimeRange(timeRange tracefunnel.TimeRange) error {
|
||||
// if timeRange.StartTime <= 0 {
|
||||
// return fmt.Errorf("start time must be positive")
|
||||
// }
|
||||
//
|
||||
// if timeRange.EndTime <= 0 {
|
||||
// return fmt.Errorf("end time must be positive")
|
||||
// }
|
||||
//
|
||||
// if timeRange.EndTime < timeRange.StartTime {
|
||||
// return fmt.Errorf("end time must be after start time")
|
||||
// }
|
||||
//
|
||||
// // Check if the time range is not too far in the future
|
||||
// now := time.Now().UnixNano() / 1000000 // Convert to milliseconds
|
||||
// if timeRange.EndTime > now {
|
||||
// return fmt.Errorf("end time cannot be in the future")
|
||||
// }
|
||||
//
|
||||
// // Check if the time range is not too old (e.g., more than 30 days)
|
||||
// maxAge := int64(30 * 24 * 60 * 60 * 1000) // 30 days in milliseconds
|
||||
// if now-timeRange.StartTime > maxAge {
|
||||
// return fmt.Errorf("time range cannot be older than 30 days")
|
||||
// }
|
||||
//
|
||||
// return nil
|
||||
//}
|
||||
//
|
||||
//// ValidateStepOrder validates that step orders are sequential
|
||||
//func ValidateStepOrder(steps []tracefunnel.FunnelStep) error {
|
||||
// if len(steps) < 2 {
|
||||
// return nil
|
||||
// }
|
||||
//
|
||||
// // Create a map to track used orders
|
||||
// usedOrders := make(map[int64]bool)
|
||||
//
|
||||
// for i, step := range steps {
|
||||
// if usedOrders[step.Order] {
|
||||
// return fmt.Errorf("duplicate step order %d at step %d", step.Order, i+1)
|
||||
// }
|
||||
// usedOrders[step.Order] = true
|
||||
// }
|
||||
//
|
||||
// // Check if orders are sequential
|
||||
// for i := 0; i < len(steps)-1; i++ {
|
||||
// if steps[i+1].Order != steps[i].Order+1 {
|
||||
// return fmt.Errorf("step orders must be sequential")
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// return nil
|
||||
//}
|
||||
@@ -795,18 +795,14 @@ func (r *ClickHouseReader) GetSpansForTrace(ctx context.Context, traceID string,
|
||||
return searchScanResponses, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadataCache(ctx context.Context, traceID string) (*model.GetWaterfallSpansForTraceWithMetadataCache, error) {
|
||||
func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadataCache(ctx context.Context, orgID valuer.UUID, traceID string) (*model.GetWaterfallSpansForTraceWithMetadataCache, error) {
|
||||
cachedTraceData := new(model.GetWaterfallSpansForTraceWithMetadataCache)
|
||||
cacheStatus, err := r.cache.Retrieve(ctx, fmt.Sprintf("getWaterfallSpansForTraceWithMetadata-%v", traceID), cachedTraceData, false)
|
||||
err := r.cache.Get(ctx, orgID, strings.Join([]string{"getWaterfallSpansForTraceWithMetadata", traceID}, "-"), cachedTraceData, false)
|
||||
if err != nil {
|
||||
zap.L().Debug("error in retrieving getWaterfallSpansForTraceWithMetadata cache", zap.Error(err), zap.String("traceID", traceID))
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if cacheStatus != cache.RetrieveStatusHit {
|
||||
return nil, errors.Errorf("cache status for getWaterfallSpansForTraceWithMetadata : %s, traceID: %s", cacheStatus, traceID)
|
||||
}
|
||||
|
||||
if time.Since(time.UnixMilli(int64(cachedTraceData.EndTime))) < r.fluxIntervalForTraceDetail {
|
||||
zap.L().Info("the trace end time falls under the flux interval, skipping getWaterfallSpansForTraceWithMetadata cache", zap.String("traceID", traceID))
|
||||
return nil, errors.Errorf("the trace end time falls under the flux interval, skipping getWaterfallSpansForTraceWithMetadata cache, traceID: %s", traceID)
|
||||
@@ -816,7 +812,7 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadataCache(ctx contex
|
||||
return cachedTraceData, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Context, traceID string, req *model.GetWaterfallSpansForTraceWithMetadataParams) (*model.GetWaterfallSpansForTraceWithMetadataResponse, *model.ApiError) {
|
||||
func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Context, orgID valuer.UUID, traceID string, req *model.GetWaterfallSpansForTraceWithMetadataParams) (*model.GetWaterfallSpansForTraceWithMetadataResponse, *model.ApiError) {
|
||||
response := new(model.GetWaterfallSpansForTraceWithMetadataResponse)
|
||||
var startTime, endTime, durationNano, totalErrorSpans, totalSpans uint64
|
||||
var spanIdToSpanNodeMap = map[string]*model.Span{}
|
||||
@@ -826,7 +822,7 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Con
|
||||
var hasMissingSpans bool
|
||||
|
||||
claims, errv2 := authtypes.ClaimsFromContext(ctx)
|
||||
cachedTraceData, err := r.GetWaterfallSpansForTraceWithMetadataCache(ctx, traceID)
|
||||
cachedTraceData, err := r.GetWaterfallSpansForTraceWithMetadataCache(ctx, orgID, traceID)
|
||||
if err == nil {
|
||||
startTime = cachedTraceData.StartTime
|
||||
endTime = cachedTraceData.EndTime
|
||||
@@ -984,7 +980,7 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Con
|
||||
}
|
||||
|
||||
zap.L().Info("getWaterfallSpansForTraceWithMetadata: processing pre cache", zap.Duration("duration", time.Since(processingBeforeCache)), zap.String("traceID", traceID))
|
||||
cacheErr := r.cache.Store(ctx, fmt.Sprintf("getWaterfallSpansForTraceWithMetadata-%v", traceID), &traceCache, time.Minute*5)
|
||||
cacheErr := r.cache.Set(ctx, orgID, strings.Join([]string{"getWaterfallSpansForTraceWithMetadata", traceID}, "-"), &traceCache, time.Minute*5)
|
||||
if cacheErr != nil {
|
||||
zap.L().Debug("failed to store cache for getWaterfallSpansForTraceWithMetadata", zap.String("traceID", traceID), zap.Error(err))
|
||||
}
|
||||
@@ -1007,18 +1003,14 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Con
|
||||
return response, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetFlamegraphSpansForTraceCache(ctx context.Context, traceID string) (*model.GetFlamegraphSpansForTraceCache, error) {
|
||||
func (r *ClickHouseReader) GetFlamegraphSpansForTraceCache(ctx context.Context, orgID valuer.UUID, traceID string) (*model.GetFlamegraphSpansForTraceCache, error) {
|
||||
cachedTraceData := new(model.GetFlamegraphSpansForTraceCache)
|
||||
cacheStatus, err := r.cache.Retrieve(ctx, fmt.Sprintf("getFlamegraphSpansForTrace-%v", traceID), cachedTraceData, false)
|
||||
err := r.cache.Get(ctx, orgID, strings.Join([]string{"getFlamegraphSpansForTrace", traceID}, "-"), cachedTraceData, false)
|
||||
if err != nil {
|
||||
zap.L().Debug("error in retrieving getFlamegraphSpansForTrace cache", zap.Error(err), zap.String("traceID", traceID))
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if cacheStatus != cache.RetrieveStatusHit {
|
||||
return nil, errors.Errorf("cache status for getFlamegraphSpansForTrace : %s, traceID: %s", cacheStatus, traceID)
|
||||
}
|
||||
|
||||
if time.Since(time.UnixMilli(int64(cachedTraceData.EndTime))) < r.fluxIntervalForTraceDetail {
|
||||
zap.L().Info("the trace end time falls under the flux interval, skipping getFlamegraphSpansForTrace cache", zap.String("traceID", traceID))
|
||||
return nil, errors.Errorf("the trace end time falls under the flux interval, skipping getFlamegraphSpansForTrace cache, traceID: %s", traceID)
|
||||
@@ -1028,7 +1020,7 @@ func (r *ClickHouseReader) GetFlamegraphSpansForTraceCache(ctx context.Context,
|
||||
return cachedTraceData, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetFlamegraphSpansForTrace(ctx context.Context, traceID string, req *model.GetFlamegraphSpansForTraceParams) (*model.GetFlamegraphSpansForTraceResponse, *model.ApiError) {
|
||||
func (r *ClickHouseReader) GetFlamegraphSpansForTrace(ctx context.Context, orgID valuer.UUID, traceID string, req *model.GetFlamegraphSpansForTraceParams) (*model.GetFlamegraphSpansForTraceResponse, *model.ApiError) {
|
||||
trace := new(model.GetFlamegraphSpansForTraceResponse)
|
||||
var startTime, endTime, durationNano uint64
|
||||
var spanIdToSpanNodeMap = map[string]*model.FlamegraphSpan{}
|
||||
@@ -1037,7 +1029,7 @@ func (r *ClickHouseReader) GetFlamegraphSpansForTrace(ctx context.Context, trace
|
||||
var traceRoots []*model.FlamegraphSpan
|
||||
|
||||
// get the trace tree from cache!
|
||||
cachedTraceData, err := r.GetFlamegraphSpansForTraceCache(ctx, traceID)
|
||||
cachedTraceData, err := r.GetFlamegraphSpansForTraceCache(ctx, orgID, traceID)
|
||||
|
||||
if err == nil {
|
||||
startTime = cachedTraceData.StartTime
|
||||
@@ -1136,7 +1128,7 @@ func (r *ClickHouseReader) GetFlamegraphSpansForTrace(ctx context.Context, trace
|
||||
}
|
||||
|
||||
zap.L().Info("getFlamegraphSpansForTrace: processing pre cache", zap.Duration("duration", time.Since(processingBeforeCache)), zap.String("traceID", traceID))
|
||||
cacheErr := r.cache.Store(ctx, fmt.Sprintf("getFlamegraphSpansForTrace-%v", traceID), &traceCache, time.Minute*5)
|
||||
cacheErr := r.cache.Set(ctx, orgID, strings.Join([]string{"getFlamegraphSpansForTrace", traceID}, "-"), &traceCache, time.Minute*5)
|
||||
if cacheErr != nil {
|
||||
zap.L().Debug("failed to store cache for getFlamegraphSpansForTrace", zap.String("traceID", traceID), zap.Error(err))
|
||||
}
|
||||
@@ -2266,11 +2258,11 @@ func (r *ClickHouseReader) GetTotalLogs(ctx context.Context) (uint64, error) {
|
||||
return totalLogs, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) FetchTemporality(ctx context.Context, metricNames []string) (map[string]map[v3.Temporality]bool, error) {
|
||||
func (r *ClickHouseReader) FetchTemporality(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]map[v3.Temporality]bool, error) {
|
||||
metricNameToTemporality := make(map[string]map[v3.Temporality]bool)
|
||||
var metricNamesToQuery []string
|
||||
for _, metricName := range metricNames {
|
||||
updatedMetadata, cacheErr := r.GetUpdatedMetricsMetadata(ctx, metricName)
|
||||
updatedMetadata, cacheErr := r.GetUpdatedMetricsMetadata(ctx, orgID, metricName)
|
||||
if cacheErr != nil {
|
||||
zap.L().Info("Error in getting metrics cached metadata", zap.Error(cacheErr))
|
||||
}
|
||||
@@ -2956,7 +2948,7 @@ func (r *ClickHouseReader) QueryDashboardVars(ctx context.Context, query string)
|
||||
return &result, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetMetricAggregateAttributes(ctx context.Context, req *v3.AggregateAttributeRequest, skipDotNames bool, skipSignozMetrics bool) (*v3.AggregateAttributeResponse, error) {
|
||||
func (r *ClickHouseReader) GetMetricAggregateAttributes(ctx context.Context, orgID valuer.UUID, req *v3.AggregateAttributeRequest, skipDotNames bool, skipSignozMetrics bool) (*v3.AggregateAttributeResponse, error) {
|
||||
|
||||
var query string
|
||||
var err error
|
||||
@@ -2991,7 +2983,7 @@ func (r *ClickHouseReader) GetMetricAggregateAttributes(ctx context.Context, req
|
||||
continue
|
||||
}
|
||||
|
||||
metadata, apiError := r.GetUpdatedMetricsMetadata(ctx, metricName)
|
||||
metadata, apiError := r.GetUpdatedMetricsMetadata(ctx, orgID, metricName)
|
||||
if apiError != nil {
|
||||
zap.L().Error("Error in getting metrics cached metadata", zap.Error(apiError))
|
||||
}
|
||||
@@ -3096,7 +3088,7 @@ func (r *ClickHouseReader) GetMetricAttributeValues(ctx context.Context, req *v3
|
||||
return &attributeValues, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetMetricMetadata(ctx context.Context, metricName, serviceName string) (*v3.MetricMetadataResponse, error) {
|
||||
func (r *ClickHouseReader) GetMetricMetadata(ctx context.Context, orgID valuer.UUID, metricName, serviceName string) (*v3.MetricMetadataResponse, error) {
|
||||
|
||||
unixMilli := common.PastDayRoundOff()
|
||||
|
||||
@@ -3121,7 +3113,7 @@ func (r *ClickHouseReader) GetMetricMetadata(ctx context.Context, metricName, se
|
||||
deltaExists = true
|
||||
}
|
||||
}
|
||||
metadata, apiError := r.GetUpdatedMetricsMetadata(ctx, metricName)
|
||||
metadata, apiError := r.GetUpdatedMetricsMetadata(ctx, orgID, metricName)
|
||||
if apiError != nil {
|
||||
zap.L().Error("Error in getting metric cached metadata", zap.Error(apiError))
|
||||
}
|
||||
@@ -5187,7 +5179,7 @@ func (r *ClickHouseReader) GetActiveTimeSeriesForMetricName(ctx context.Context,
|
||||
return timeSeries, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_explorer.SummaryListMetricsRequest) (*metrics_explorer.SummaryListMetricsResponse, *model.ApiError) {
|
||||
func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID valuer.UUID, req *metrics_explorer.SummaryListMetricsRequest) (*metrics_explorer.SummaryListMetricsResponse, *model.ApiError) {
|
||||
var args []interface{}
|
||||
|
||||
// Build filter conditions (if any)
|
||||
@@ -5365,7 +5357,7 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
|
||||
}
|
||||
|
||||
//get updated metrics data
|
||||
batch, apiError := r.GetUpdatedMetricsMetadata(ctx, metricNames...)
|
||||
batch, apiError := r.GetUpdatedMetricsMetadata(ctx, orgID, metricNames...)
|
||||
if apiError != nil {
|
||||
zap.L().Error("Error in getting metrics cached metadata", zap.Error(apiError))
|
||||
}
|
||||
@@ -6022,18 +6014,18 @@ LIMIT 40`, // added rand to get diff value every time we run this query
|
||||
return fingerprints, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) DeleteMetricsMetadata(ctx context.Context, metricName string) *model.ApiError {
|
||||
func (r *ClickHouseReader) DeleteMetricsMetadata(ctx context.Context, orgID valuer.UUID, metricName string) *model.ApiError {
|
||||
delQuery := fmt.Sprintf(`ALTER TABLE %s.%s DELETE WHERE metric_name = ?;`, signozMetricDBName, signozUpdatedMetricsMetadataLocalTable)
|
||||
valueCtx := context.WithValue(ctx, "clickhouse_max_threads", constants.MetricsExplorerClickhouseThreads)
|
||||
err := r.db.Exec(valueCtx, delQuery, metricName)
|
||||
if err != nil {
|
||||
return &model.ApiError{Typ: "ClickHouseError", Err: err}
|
||||
}
|
||||
r.cache.Remove(ctx, constants.UpdatedMetricsMetadataCachePrefix+metricName)
|
||||
r.cache.Delete(ctx, orgID, constants.UpdatedMetricsMetadataCachePrefix+metricName)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) UpdateMetricsMetadata(ctx context.Context, req *model.UpdateMetricsMetadata) *model.ApiError {
|
||||
func (r *ClickHouseReader) UpdateMetricsMetadata(ctx context.Context, orgID valuer.UUID, req *model.UpdateMetricsMetadata) *model.ApiError {
|
||||
if req.MetricType == v3.MetricTypeHistogram {
|
||||
labels := []string{"le"}
|
||||
hasLabels, apiError := r.CheckForLabelsInMetric(ctx, req.MetricName, labels)
|
||||
@@ -6062,7 +6054,7 @@ func (r *ClickHouseReader) UpdateMetricsMetadata(ctx context.Context, req *model
|
||||
}
|
||||
}
|
||||
|
||||
apiErr := r.DeleteMetricsMetadata(ctx, req.MetricName)
|
||||
apiErr := r.DeleteMetricsMetadata(ctx, orgID, req.MetricName)
|
||||
if apiErr != nil {
|
||||
return apiErr
|
||||
}
|
||||
@@ -6073,7 +6065,7 @@ VALUES ( ?, ?, ?, ?, ?, ?, ?);`, signozMetricDBName, signozUpdatedMetricsMetadat
|
||||
if err != nil {
|
||||
return &model.ApiError{Typ: "ClickHouseError", Err: err}
|
||||
}
|
||||
err = r.cache.Store(ctx, constants.UpdatedMetricsMetadataCachePrefix+req.MetricName, req, -1)
|
||||
err = r.cache.Set(ctx, orgID, constants.UpdatedMetricsMetadataCachePrefix+req.MetricName, req, -1)
|
||||
if err != nil {
|
||||
return &model.ApiError{Typ: "CachingErr", Err: err}
|
||||
}
|
||||
@@ -6114,7 +6106,7 @@ func (r *ClickHouseReader) CheckForLabelsInMetric(ctx context.Context, metricNam
|
||||
return hasLE, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) PreloadMetricsMetadata(ctx context.Context) []error {
|
||||
func (r *ClickHouseReader) PreloadMetricsMetadata(ctx context.Context, orgID valuer.UUID) []error {
|
||||
var allMetricsMetadata []model.UpdateMetricsMetadata
|
||||
var errorList []error
|
||||
// Fetch all rows from ClickHouse
|
||||
@@ -6127,7 +6119,7 @@ func (r *ClickHouseReader) PreloadMetricsMetadata(ctx context.Context) []error {
|
||||
return errorList
|
||||
}
|
||||
for _, m := range allMetricsMetadata {
|
||||
err := r.cache.Store(ctx, constants.UpdatedMetricsMetadataCachePrefix+m.MetricName, &m, -1)
|
||||
err := r.cache.Set(ctx, orgID, constants.UpdatedMetricsMetadataCachePrefix+m.MetricName, &m, -1)
|
||||
if err != nil {
|
||||
errorList = append(errorList, err)
|
||||
}
|
||||
@@ -6136,7 +6128,7 @@ func (r *ClickHouseReader) PreloadMetricsMetadata(ctx context.Context) []error {
|
||||
return errorList
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetUpdatedMetricsMetadata(ctx context.Context, metricNames ...string) (map[string]*model.UpdateMetricsMetadata, *model.ApiError) {
|
||||
func (r *ClickHouseReader) GetUpdatedMetricsMetadata(ctx context.Context, orgID valuer.UUID, metricNames ...string) (map[string]*model.UpdateMetricsMetadata, *model.ApiError) {
|
||||
cachedMetadata := make(map[string]*model.UpdateMetricsMetadata)
|
||||
var missingMetrics []string
|
||||
|
||||
@@ -6144,8 +6136,8 @@ func (r *ClickHouseReader) GetUpdatedMetricsMetadata(ctx context.Context, metric
|
||||
for _, metricName := range metricNames {
|
||||
metadata := new(model.UpdateMetricsMetadata)
|
||||
cacheKey := constants.UpdatedMetricsMetadataCachePrefix + metricName
|
||||
retrieveStatus, err := r.cache.Retrieve(ctx, cacheKey, metadata, true)
|
||||
if err == nil && retrieveStatus == cache.RetrieveStatusHit {
|
||||
err := r.cache.Get(ctx, orgID, cacheKey, metadata, true)
|
||||
if err == nil {
|
||||
cachedMetadata[metricName] = metadata
|
||||
} else {
|
||||
if err != nil {
|
||||
@@ -6185,7 +6177,7 @@ func (r *ClickHouseReader) GetUpdatedMetricsMetadata(ctx context.Context, metric
|
||||
|
||||
// Cache the result for future requests.
|
||||
cacheKey := constants.UpdatedMetricsMetadataCachePrefix + metadata.MetricName
|
||||
if cacheErr := r.cache.Store(ctx, cacheKey, metadata, -1); cacheErr != nil {
|
||||
if cacheErr := r.cache.Set(ctx, orgID, cacheKey, metadata, -1); cacheErr != nil {
|
||||
zap.L().Error("Failed to store metrics metadata in cache", zap.String("metric_name", metadata.MetricName), zap.Error(cacheErr))
|
||||
}
|
||||
cachedMetadata[metadata.MetricName] = metadata
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -4,7 +4,10 @@ import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
func (aH *APIHandler) getHostAttributeKeys(w http.ResponseWriter, r *http.Request) {
|
||||
@@ -50,17 +53,27 @@ func (aH *APIHandler) getHostAttributeValues(w http.ResponseWriter, r *http.Requ
|
||||
|
||||
func (aH *APIHandler) getHostList(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
req := model.HostListRequest{}
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
req := model.HostListRequest{}
|
||||
// parse request
|
||||
err := json.NewDecoder(r.Body).Decode(&req)
|
||||
err = json.NewDecoder(r.Body).Decode(&req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
// get host list
|
||||
hostList, err := aH.hostsRepo.GetHostList(ctx, req)
|
||||
hostList, err := aH.hostsRepo.GetHostList(ctx, orgID, req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
@@ -106,15 +119,25 @@ func (aH *APIHandler) getProcessAttributeValues(w http.ResponseWriter, r *http.R
|
||||
|
||||
func (aH *APIHandler) getProcessList(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
req := model.ProcessListRequest{}
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
err := json.NewDecoder(r.Body).Decode(&req)
|
||||
req := model.ProcessListRequest{}
|
||||
err = json.NewDecoder(r.Body).Decode(&req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
hostList, err := aH.processesRepo.GetProcessList(ctx, req)
|
||||
hostList, err := aH.processesRepo.GetProcessList(ctx, orgID, req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
@@ -159,15 +182,25 @@ func (aH *APIHandler) getPodAttributeValues(w http.ResponseWriter, r *http.Reque
|
||||
|
||||
func (aH *APIHandler) getPodList(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
req := model.PodListRequest{}
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
err := json.NewDecoder(r.Body).Decode(&req)
|
||||
req := model.PodListRequest{}
|
||||
err = json.NewDecoder(r.Body).Decode(&req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
podList, err := aH.podsRepo.GetPodList(ctx, req)
|
||||
podList, err := aH.podsRepo.GetPodList(ctx, orgID, req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
@@ -212,15 +245,25 @@ func (aH *APIHandler) getNodeAttributeValues(w http.ResponseWriter, r *http.Requ
|
||||
|
||||
func (aH *APIHandler) getNodeList(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
req := model.NodeListRequest{}
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
err := json.NewDecoder(r.Body).Decode(&req)
|
||||
req := model.NodeListRequest{}
|
||||
err = json.NewDecoder(r.Body).Decode(&req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
nodeList, err := aH.nodesRepo.GetNodeList(ctx, req)
|
||||
nodeList, err := aH.nodesRepo.GetNodeList(ctx, orgID, req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
@@ -265,15 +308,25 @@ func (aH *APIHandler) getNamespaceAttributeValues(w http.ResponseWriter, r *http
|
||||
|
||||
func (aH *APIHandler) getNamespaceList(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
req := model.NamespaceListRequest{}
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
err := json.NewDecoder(r.Body).Decode(&req)
|
||||
req := model.NamespaceListRequest{}
|
||||
err = json.NewDecoder(r.Body).Decode(&req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
namespaceList, err := aH.namespacesRepo.GetNamespaceList(ctx, req)
|
||||
namespaceList, err := aH.namespacesRepo.GetNamespaceList(ctx, orgID, req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
@@ -318,15 +371,25 @@ func (aH *APIHandler) getClusterAttributeValues(w http.ResponseWriter, r *http.R
|
||||
|
||||
func (aH *APIHandler) getClusterList(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
req := model.ClusterListRequest{}
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
err := json.NewDecoder(r.Body).Decode(&req)
|
||||
req := model.ClusterListRequest{}
|
||||
err = json.NewDecoder(r.Body).Decode(&req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
clusterList, err := aH.clustersRepo.GetClusterList(ctx, req)
|
||||
clusterList, err := aH.clustersRepo.GetClusterList(ctx, orgID, req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
@@ -371,15 +434,25 @@ func (aH *APIHandler) getDeploymentAttributeValues(w http.ResponseWriter, r *htt
|
||||
|
||||
func (aH *APIHandler) getDeploymentList(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
req := model.DeploymentListRequest{}
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
err := json.NewDecoder(r.Body).Decode(&req)
|
||||
req := model.DeploymentListRequest{}
|
||||
err = json.NewDecoder(r.Body).Decode(&req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
deploymentList, err := aH.deploymentsRepo.GetDeploymentList(ctx, req)
|
||||
deploymentList, err := aH.deploymentsRepo.GetDeploymentList(ctx, orgID, req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
@@ -424,15 +497,25 @@ func (aH *APIHandler) getDaemonSetAttributeValues(w http.ResponseWriter, r *http
|
||||
|
||||
func (aH *APIHandler) getDaemonSetList(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
req := model.DaemonSetListRequest{}
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
err := json.NewDecoder(r.Body).Decode(&req)
|
||||
req := model.DaemonSetListRequest{}
|
||||
err = json.NewDecoder(r.Body).Decode(&req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
daemonSetList, err := aH.daemonsetsRepo.GetDaemonSetList(ctx, req)
|
||||
daemonSetList, err := aH.daemonsetsRepo.GetDaemonSetList(ctx, orgID, req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
@@ -477,15 +560,25 @@ func (aH *APIHandler) getStatefulSetAttributeValues(w http.ResponseWriter, r *ht
|
||||
|
||||
func (aH *APIHandler) getStatefulSetList(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
req := model.StatefulSetListRequest{}
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
err := json.NewDecoder(r.Body).Decode(&req)
|
||||
req := model.StatefulSetListRequest{}
|
||||
err = json.NewDecoder(r.Body).Decode(&req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
statefulSetList, err := aH.statefulsetsRepo.GetStatefulSetList(ctx, req)
|
||||
statefulSetList, err := aH.statefulsetsRepo.GetStatefulSetList(ctx, orgID, req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
@@ -528,15 +621,25 @@ func (aH *APIHandler) getJobAttributeValues(w http.ResponseWriter, r *http.Reque
|
||||
|
||||
func (aH *APIHandler) getJobList(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
req := model.JobListRequest{}
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
err := json.NewDecoder(r.Body).Decode(&req)
|
||||
req := model.JobListRequest{}
|
||||
err = json.NewDecoder(r.Body).Decode(&req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
jobList, err := aH.jobsRepo.GetJobList(ctx, req)
|
||||
jobList, err := aH.jobsRepo.GetJobList(ctx, orgID, req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
@@ -547,15 +650,25 @@ func (aH *APIHandler) getJobList(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
func (aH *APIHandler) getPvcList(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
req := model.VolumeListRequest{}
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
err := json.NewDecoder(r.Body).Decode(&req)
|
||||
req := model.VolumeListRequest{}
|
||||
err = json.NewDecoder(r.Body).Decode(&req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
pvcList, err := aH.pvcsRepo.GetPvcList(ctx, req)
|
||||
pvcList, err := aH.pvcsRepo.GetPvcList(ctx, orgID, req)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
|
||||
return
|
||||
|
||||
@@ -11,6 +11,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/postprocess"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"golang.org/x/exp/slices"
|
||||
)
|
||||
|
||||
@@ -131,7 +132,7 @@ func (p *ClustersRepo) getMetadataAttributes(ctx context.Context, req model.Clus
|
||||
return clusterAttrs, nil
|
||||
}
|
||||
|
||||
func (p *ClustersRepo) getTopClusterGroups(ctx context.Context, req model.ClusterListRequest, q *v3.QueryRangeParamsV3) ([]map[string]string, []map[string]string, error) {
|
||||
func (p *ClustersRepo) getTopClusterGroups(ctx context.Context, orgID valuer.UUID, req model.ClusterListRequest, q *v3.QueryRangeParamsV3) ([]map[string]string, []map[string]string, error) {
|
||||
step, timeSeriesTableName, samplesTableName := getParamsForTopClusters(req)
|
||||
|
||||
queryNames := queryNamesForClusters[req.OrderBy.ColumnName]
|
||||
@@ -162,7 +163,7 @@ func (p *ClustersRepo) getTopClusterGroups(ctx context.Context, req model.Cluste
|
||||
topClusterGroupsQueryRangeParams.CompositeQuery.BuilderQueries[queryName] = query
|
||||
}
|
||||
|
||||
queryResponse, _, err := p.querierV2.QueryRange(ctx, topClusterGroupsQueryRangeParams)
|
||||
queryResponse, _, err := p.querierV2.QueryRange(ctx, orgID, topClusterGroupsQueryRangeParams)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
@@ -201,7 +202,7 @@ func (p *ClustersRepo) getTopClusterGroups(ctx context.Context, req model.Cluste
|
||||
return topClusterGroups, allClusterGroups, nil
|
||||
}
|
||||
|
||||
func (p *ClustersRepo) GetClusterList(ctx context.Context, req model.ClusterListRequest) (model.ClusterListResponse, error) {
|
||||
func (p *ClustersRepo) GetClusterList(ctx context.Context, orgID valuer.UUID, req model.ClusterListRequest) (model.ClusterListResponse, error) {
|
||||
resp := model.ClusterListResponse{}
|
||||
|
||||
if req.Limit == 0 {
|
||||
@@ -243,7 +244,7 @@ func (p *ClustersRepo) GetClusterList(ctx context.Context, req model.ClusterList
|
||||
return resp, err
|
||||
}
|
||||
|
||||
topClusterGroups, allClusterGroups, err := p.getTopClusterGroups(ctx, req, query)
|
||||
topClusterGroups, allClusterGroups, err := p.getTopClusterGroups(ctx, orgID, req, query)
|
||||
if err != nil {
|
||||
return resp, err
|
||||
}
|
||||
@@ -277,7 +278,7 @@ func (p *ClustersRepo) GetClusterList(ctx context.Context, req model.ClusterList
|
||||
}
|
||||
}
|
||||
|
||||
queryResponse, _, err := p.querierV2.QueryRange(ctx, query)
|
||||
queryResponse, _, err := p.querierV2.QueryRange(ctx, orgID, query)
|
||||
if err != nil {
|
||||
return resp, err
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/postprocess"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"golang.org/x/exp/slices"
|
||||
)
|
||||
|
||||
@@ -198,7 +199,7 @@ func (d *DaemonSetsRepo) getMetadataAttributes(ctx context.Context, req model.Da
|
||||
return daemonSetAttrs, nil
|
||||
}
|
||||
|
||||
func (d *DaemonSetsRepo) getTopDaemonSetGroups(ctx context.Context, req model.DaemonSetListRequest, q *v3.QueryRangeParamsV3) ([]map[string]string, []map[string]string, error) {
|
||||
func (d *DaemonSetsRepo) getTopDaemonSetGroups(ctx context.Context, orgID valuer.UUID, req model.DaemonSetListRequest, q *v3.QueryRangeParamsV3) ([]map[string]string, []map[string]string, error) {
|
||||
step, timeSeriesTableName, samplesTableName := getParamsForTopDaemonSets(req)
|
||||
|
||||
queryNames := queryNamesForDaemonSets[req.OrderBy.ColumnName]
|
||||
@@ -229,7 +230,7 @@ func (d *DaemonSetsRepo) getTopDaemonSetGroups(ctx context.Context, req model.Da
|
||||
topDaemonSetGroupsQueryRangeParams.CompositeQuery.BuilderQueries[queryName] = query
|
||||
}
|
||||
|
||||
queryResponse, _, err := d.querierV2.QueryRange(ctx, topDaemonSetGroupsQueryRangeParams)
|
||||
queryResponse, _, err := d.querierV2.QueryRange(ctx, orgID, topDaemonSetGroupsQueryRangeParams)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
@@ -268,7 +269,7 @@ func (d *DaemonSetsRepo) getTopDaemonSetGroups(ctx context.Context, req model.Da
|
||||
return topDaemonSetGroups, allDaemonSetGroups, nil
|
||||
}
|
||||
|
||||
func (d *DaemonSetsRepo) GetDaemonSetList(ctx context.Context, req model.DaemonSetListRequest) (model.DaemonSetListResponse, error) {
|
||||
func (d *DaemonSetsRepo) GetDaemonSetList(ctx context.Context, orgID valuer.UUID, req model.DaemonSetListRequest) (model.DaemonSetListResponse, error) {
|
||||
resp := model.DaemonSetListResponse{}
|
||||
|
||||
if req.Limit == 0 {
|
||||
@@ -320,7 +321,7 @@ func (d *DaemonSetsRepo) GetDaemonSetList(ctx context.Context, req model.DaemonS
|
||||
return resp, err
|
||||
}
|
||||
|
||||
topDaemonSetGroups, allDaemonSetGroups, err := d.getTopDaemonSetGroups(ctx, req, query)
|
||||
topDaemonSetGroups, allDaemonSetGroups, err := d.getTopDaemonSetGroups(ctx, orgID, req, query)
|
||||
if err != nil {
|
||||
return resp, err
|
||||
}
|
||||
@@ -354,7 +355,7 @@ func (d *DaemonSetsRepo) GetDaemonSetList(ctx context.Context, req model.DaemonS
|
||||
}
|
||||
}
|
||||
|
||||
queryResponse, _, err := d.querierV2.QueryRange(ctx, query)
|
||||
queryResponse, _, err := d.querierV2.QueryRange(ctx, orgID, query)
|
||||
if err != nil {
|
||||
return resp, err
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/postprocess"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"golang.org/x/exp/slices"
|
||||
)
|
||||
|
||||
@@ -198,7 +199,7 @@ func (d *DeploymentsRepo) getMetadataAttributes(ctx context.Context, req model.D
|
||||
return deploymentAttrs, nil
|
||||
}
|
||||
|
||||
func (d *DeploymentsRepo) getTopDeploymentGroups(ctx context.Context, req model.DeploymentListRequest, q *v3.QueryRangeParamsV3) ([]map[string]string, []map[string]string, error) {
|
||||
func (d *DeploymentsRepo) getTopDeploymentGroups(ctx context.Context, orgID valuer.UUID, req model.DeploymentListRequest, q *v3.QueryRangeParamsV3) ([]map[string]string, []map[string]string, error) {
|
||||
step, timeSeriesTableName, samplesTableName := getParamsForTopDeployments(req)
|
||||
|
||||
queryNames := queryNamesForDeployments[req.OrderBy.ColumnName]
|
||||
@@ -229,7 +230,7 @@ func (d *DeploymentsRepo) getTopDeploymentGroups(ctx context.Context, req model.
|
||||
topDeploymentGroupsQueryRangeParams.CompositeQuery.BuilderQueries[queryName] = query
|
||||
}
|
||||
|
||||
queryResponse, _, err := d.querierV2.QueryRange(ctx, topDeploymentGroupsQueryRangeParams)
|
||||
queryResponse, _, err := d.querierV2.QueryRange(ctx, orgID, topDeploymentGroupsQueryRangeParams)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
@@ -268,7 +269,7 @@ func (d *DeploymentsRepo) getTopDeploymentGroups(ctx context.Context, req model.
|
||||
return topDeploymentGroups, allDeploymentGroups, nil
|
||||
}
|
||||
|
||||
func (d *DeploymentsRepo) GetDeploymentList(ctx context.Context, req model.DeploymentListRequest) (model.DeploymentListResponse, error) {
|
||||
func (d *DeploymentsRepo) GetDeploymentList(ctx context.Context, orgID valuer.UUID, req model.DeploymentListRequest) (model.DeploymentListResponse, error) {
|
||||
resp := model.DeploymentListResponse{}
|
||||
|
||||
if req.Limit == 0 {
|
||||
@@ -320,7 +321,7 @@ func (d *DeploymentsRepo) GetDeploymentList(ctx context.Context, req model.Deplo
|
||||
return resp, err
|
||||
}
|
||||
|
||||
topDeploymentGroups, allDeploymentGroups, err := d.getTopDeploymentGroups(ctx, req, query)
|
||||
topDeploymentGroups, allDeploymentGroups, err := d.getTopDeploymentGroups(ctx, orgID, req, query)
|
||||
if err != nil {
|
||||
return resp, err
|
||||
}
|
||||
@@ -354,7 +355,7 @@ func (d *DeploymentsRepo) GetDeploymentList(ctx context.Context, req model.Deplo
|
||||
}
|
||||
}
|
||||
|
||||
queryResponse, _, err := d.querierV2.QueryRange(ctx, query)
|
||||
queryResponse, _, err := d.querierV2.QueryRange(ctx, orgID, query)
|
||||
if err != nil {
|
||||
return resp, err
|
||||
}
|
||||
|
||||
@@ -16,6 +16,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/postprocess"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"go.uber.org/zap"
|
||||
"golang.org/x/exp/maps"
|
||||
"golang.org/x/exp/slices"
|
||||
@@ -129,7 +130,7 @@ func (h *HostsRepo) GetHostAttributeValues(ctx context.Context, req v3.FilterAtt
|
||||
return &v3.FilterAttributeValueResponse{StringAttributeValues: hostNames}, nil
|
||||
}
|
||||
|
||||
func (h *HostsRepo) getActiveHosts(ctx context.Context, req model.HostListRequest) (map[string]bool, error) {
|
||||
func (h *HostsRepo) getActiveHosts(ctx context.Context, orgID valuer.UUID, req model.HostListRequest) (map[string]bool, error) {
|
||||
activeStatus := map[string]bool{}
|
||||
step := common.MinAllowedStepInterval(req.Start, req.End)
|
||||
|
||||
@@ -172,7 +173,7 @@ func (h *HostsRepo) getActiveHosts(ctx context.Context, req model.HostListReques
|
||||
},
|
||||
}
|
||||
|
||||
queryResponse, _, err := h.querierV2.QueryRange(ctx, ¶ms)
|
||||
queryResponse, _, err := h.querierV2.QueryRange(ctx, orgID, ¶ms)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -248,7 +249,7 @@ func (h *HostsRepo) getMetadataAttributes(ctx context.Context, req model.HostLis
|
||||
return hostAttrs, nil
|
||||
}
|
||||
|
||||
func (h *HostsRepo) getTopHostGroups(ctx context.Context, req model.HostListRequest, q *v3.QueryRangeParamsV3) ([]map[string]string, []map[string]string, error) {
|
||||
func (h *HostsRepo) getTopHostGroups(ctx context.Context, orgID valuer.UUID, req model.HostListRequest, q *v3.QueryRangeParamsV3) ([]map[string]string, []map[string]string, error) {
|
||||
step, timeSeriesTableName, samplesTableName := getParamsForTopHosts(req)
|
||||
|
||||
queryNames := queryNamesForTopHosts[req.OrderBy.ColumnName]
|
||||
@@ -276,7 +277,7 @@ func (h *HostsRepo) getTopHostGroups(ctx context.Context, req model.HostListRequ
|
||||
topHostGroupsQueryRangeParams.CompositeQuery.BuilderQueries[queryName] = query
|
||||
}
|
||||
|
||||
queryResponse, _, err := h.querierV2.QueryRange(ctx, topHostGroupsQueryRangeParams)
|
||||
queryResponse, _, err := h.querierV2.QueryRange(ctx, orgID, topHostGroupsQueryRangeParams)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
@@ -384,7 +385,7 @@ func (h *HostsRepo) IsSendingK8SAgentMetrics(ctx context.Context, req model.Host
|
||||
return maps.Keys(clusterNames), maps.Keys(nodeNames), nil
|
||||
}
|
||||
|
||||
func (h *HostsRepo) GetHostList(ctx context.Context, req model.HostListRequest) (model.HostListResponse, error) {
|
||||
func (h *HostsRepo) GetHostList(ctx context.Context, orgID valuer.UUID, req model.HostListRequest) (model.HostListResponse, error) {
|
||||
resp := model.HostListResponse{}
|
||||
|
||||
if req.Limit == 0 {
|
||||
@@ -439,12 +440,12 @@ func (h *HostsRepo) GetHostList(ctx context.Context, req model.HostListRequest)
|
||||
return resp, err
|
||||
}
|
||||
|
||||
activeHosts, err := h.getActiveHosts(ctx, req)
|
||||
activeHosts, err := h.getActiveHosts(ctx, orgID, req)
|
||||
if err != nil {
|
||||
return resp, err
|
||||
}
|
||||
|
||||
topHostGroups, allHostGroups, err := h.getTopHostGroups(ctx, req, query)
|
||||
topHostGroups, allHostGroups, err := h.getTopHostGroups(ctx, orgID, req, query)
|
||||
if err != nil {
|
||||
return resp, err
|
||||
}
|
||||
@@ -477,7 +478,7 @@ func (h *HostsRepo) GetHostList(ctx context.Context, req model.HostListRequest)
|
||||
}
|
||||
}
|
||||
|
||||
queryResponse, _, err := h.querierV2.QueryRange(ctx, query)
|
||||
queryResponse, _, err := h.querierV2.QueryRange(ctx, orgID, query)
|
||||
if err != nil {
|
||||
return resp, err
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user