mirror of https://github.com/SigNoz/signoz.git (synced 2026-03-17 18:32:11 +00:00)

Compare commits: 11 commits (tvats-expo ... fix/cloudi)
| Author | SHA1 | Date |
|---|---|---|
|  | 1502338f17 |  |
|  | fdf75f03ac |  |
|  | 12b02a1002 |  |
|  | 4ce220ba92 |  |
|  | 0211ddf0cb |  |
|  | e5eb62e45b |  |
|  | 7371dcacf0 |  |
|  | 3cdf3e06f3 |  |
|  | f8c38df2bf |  |
|  | cab4a56694 |  |
|  | 78041fe457 |  |
.github/CODEOWNERS (vendored): 10 changes
@@ -1,8 +1,6 @@
# CODEOWNERS info: https://help.github.com/en/articles/about-code-owners

-# Owners are automatically requested for review for PRs that changes code
-# that they own.
+# Owners are automatically requested for review for PRs that changes code that they own.

/frontend/ @SigNoz/frontend-maintainers

@@ -11,8 +9,10 @@
/frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json @makeavish
/frontend/src/container/OnboardingV2Container/AddDataSource/AddDataSource.tsx @makeavish

-/deploy/ @SigNoz/devops
-.github @SigNoz/devops
+# CI
+/deploy/ @therealpandey
+.github @therealpandey
+go.mod @therealpandey

# Scaffold Owners
.github/workflows/mergequeueci.yaml (vendored): 1 change
@@ -8,6 +8,7 @@ on:
jobs:
  notify:
    runs-on: ubuntu-latest
    if: github.event.pull_request.merged == false
    steps:
      - name: alert
        uses: slackapi/slack-github-action@v2.1.1
.vscode/settings.json (vendored): 4 changes
@@ -17,7 +17,5 @@
	},
	"[html]": {
		"editor.defaultFormatter": "vscode.html-language-features"
	},
-	"python-envs.defaultEnvManager": "ms-python.python:system",
-	"python-envs.pythonProjects": []
}
@@ -6,7 +6,6 @@ import (
	"github.com/SigNoz/signoz/pkg/version"
	"github.com/spf13/cobra"
-	"go.uber.org/zap" //nolint:depguard
)

var RootCmd = &cobra.Command{

@@ -19,12 +18,6 @@ var RootCmd = &cobra.Command{
}

func Execute(logger *slog.Logger) {
-	zapLogger := newZapLogger()
-	zap.ReplaceGlobals(zapLogger)
-	defer func() {
-		_ = zapLogger.Sync()
-	}()
-
	err := RootCmd.Execute()
	if err != nil {
		logger.ErrorContext(RootCmd.Context(), "error running command", "error", err)
cmd/zap.go: 110 deletions
@@ -1,110 +0,0 @@
package cmd

import (
	"context"
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	"go.uber.org/zap"         //nolint:depguard
	"go.uber.org/zap/zapcore" //nolint:depguard
)

// Deprecated: Use `NewLogger` from `pkg/instrumentation` instead.
func newZapLogger() *zap.Logger {
	config := zap.NewProductionConfig()
	config.EncoderConfig.TimeKey = "timestamp"
	config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder

	// Extract sampling config before building the logger.
	// We need to disable sampling in the config and apply it manually later
	// to ensure correct core ordering. See filteringCore documentation for details.
	samplerConfig := config.Sampling
	config.Sampling = nil

	logger, _ := config.Build()

	// Wrap with custom core wrapping to filter certain log entries.
	// The order of wrapping is important:
	// 1. First wrap with filteringCore
	// 2. Then wrap with sampler
	//
	// This creates the call chain: sampler -> filteringCore -> ioCore
	//
	// During logging:
	// - sampler.Check decides whether to sample the log entry
	// - If sampled, filteringCore.Check is called
	// - filteringCore adds itself to CheckedEntry.cores
	// - All cores in CheckedEntry.cores have their Write method called
	// - filteringCore.Write can now filter the entry before passing to ioCore
	//
	// If we didn't disable the sampler above, filteringCore would have wrapped
	// sampler. By calling sampler.Check we would have allowed it to call
	// ioCore.Check that adds itself to CheckedEntry.cores. Then ioCore.Write
	// would have bypassed our checks, making filtering impossible.
	return logger.WithOptions(zap.WrapCore(func(core zapcore.Core) zapcore.Core {
		core = &filteringCore{core}
		if samplerConfig != nil {
			core = zapcore.NewSamplerWithOptions(
				core,
				time.Second,
				samplerConfig.Initial,
				samplerConfig.Thereafter,
			)
		}
		return core
	}))
}

// filteringCore wraps a zapcore.Core to filter out log entries based on a
// custom logic.
//
// Note: This core must be positioned before the sampler in the core chain
// to ensure Write is called. See newZapLogger for ordering details.
type filteringCore struct {
	zapcore.Core
}

// filter determines whether a log entry should be written based on its fields.
// Returns false if the entry should be suppressed, true otherwise.
//
// Current filters:
// - context.Canceled: These are expected errors from cancelled operations,
//   and create noise in logs.
func (c *filteringCore) filter(fields []zapcore.Field) bool {
	for _, field := range fields {
		if field.Type == zapcore.ErrorType {
			if loggedErr, ok := field.Interface.(error); ok {
				// Suppress logs containing context.Canceled errors
				if errors.Is(loggedErr, context.Canceled) {
					return false
				}
			}
		}
	}
	return true
}

// With implements zapcore.Core.With
// It returns a new copy with the added context.
func (c *filteringCore) With(fields []zapcore.Field) zapcore.Core {
	return &filteringCore{c.Core.With(fields)}
}

// Check implements zapcore.Core.Check.
// It adds this core to the CheckedEntry if the log level is enabled,
// ensuring that Write will be called for this entry.
func (c *filteringCore) Check(ent zapcore.Entry, ce *zapcore.CheckedEntry) *zapcore.CheckedEntry {
	if c.Enabled(ent.Level) {
		return ce.AddCore(ent, c)
	}
	return ce
}

// Write implements zapcore.Core.Write.
// It filters log entries based on their fields before delegating to the wrapped core.
func (c *filteringCore) Write(ent zapcore.Entry, fields []zapcore.Field) error {
	if !c.filter(fields) {
		return nil
	}
	return c.Core.Write(ent, fields)
}
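The ordering comments above are the crux of the removed helper: the sampler has to wrap filteringCore so that filteringCore.Write is always reached. A minimal usage sketch of the behaviour this gave (hypothetical snippet reusing the filteringCore type from the removed file; sampling omitted for brevity):

	// Build a production logger and wrap its core with the filter, as
	// newZapLogger did. The filter sits directly above the io core.
	logger, _ := zap.NewProduction()
	logger = logger.WithOptions(zap.WrapCore(func(core zapcore.Core) zapcore.Core {
		return &filteringCore{core}
	}))

	// Dropped: filter() sees a field whose error matches context.Canceled.
	logger.Error("query aborted", zap.Error(context.Canceled))

	// Written as usual: no context.Canceled among the fields.
	logger.Error("query failed", zap.String("reason", "timeout"))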
@@ -1,38 +0,0 @@
version: "3"
x-common: &common
  networks:
    - signoz-net
  extra_hosts:
    - host.docker.internal:host-gateway
  logging:
    options:
      max-size: 50m
      max-file: "3"
  deploy:
    restart_policy:
      condition: on-failure
services:
  hotrod:
    <<: *common
    image: jaegertracing/example-hotrod:1.61.0
    command: [ "all" ]
    environment:
      - OTEL_EXPORTER_OTLP_ENDPOINT=http://host.docker.internal:4318 #
  load-hotrod:
    <<: *common
    image: "signoz/locust:1.2.3"
    environment:
      ATTACKED_HOST: http://hotrod:8080
      LOCUST_MODE: standalone
      NO_PROXY: standalone
      TASK_DELAY_FROM: 5
      TASK_DELAY_TO: 30
      QUIET_MODE: "${QUIET_MODE:-false}"
      LOCUST_OPTS: "--headless -u 10 -r 1"
    volumes:
      - ../../../common/locust-scripts:/locust

networks:
  signoz-net:
    name: signoz-net
    external: true
@@ -1,69 +0,0 @@
version: "3"
x-common: &common
  networks:
    - signoz-net
  extra_hosts:
    - host.docker.internal:host-gateway
  logging:
    options:
      max-size: 50m
      max-file: "3"
  deploy:
    mode: global
    restart_policy:
      condition: on-failure
services:
  otel-agent:
    <<: *common
    image: otel/opentelemetry-collector-contrib:0.111.0
    command:
      - --config=/etc/otel-collector-config.yaml
    volumes:
      - ./otel-agent-config.yaml:/etc/otel-collector-config.yaml
      - /:/hostfs:ro
    environment:
      - SIGNOZ_COLLECTOR_ENDPOINT=http://host.docker.internal:4317 # In case of external SigNoz or cloud, update the endpoint and access token
      - OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}}
      # - SIGNOZ_ACCESS_TOKEN="<your-access-token>"
    # Before exposing the ports, make sure the ports are not used by other services
    # ports:
    #   - "4317:4317"
    #   - "4318:4318"
  otel-metrics:
    <<: *common
    image: otel/opentelemetry-collector-contrib:0.111.0
    user: 0:0 # If you have security concerns, you can replace this with your `UID:GID` that has necessary permissions to docker.sock
    command:
      - --config=/etc/otel-collector-config.yaml
    volumes:
      - ./otel-metrics-config.yaml:/etc/otel-collector-config.yaml
      - /var/run/docker.sock:/var/run/docker.sock
    environment:
      - SIGNOZ_COLLECTOR_ENDPOINT=http://host.docker.internal:4317 # In case of external SigNoz or cloud, update the endpoint and access token
      - OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}}
      # - SIGNOZ_ACCESS_TOKEN="<your-access-token>"
    # Before exposing the ports, make sure the ports are not used by other services
    # ports:
    #   - "4317:4317"
    #   - "4318:4318"
    deploy:
      mode: replicated
      replicas: 1
      placement:
        constraints:
          - node.role == manager
  logspout:
    <<: *common
    image: "gliderlabs/logspout:v3.2.14"
    command: syslog+tcp://otel-agent:2255
    user: root
    volumes:
      - /etc/hostname:/etc/host_hostname:ro
      - /var/run/docker.sock:/var/run/docker.sock
    depends_on:
      - otel-agent

networks:
  signoz-net:
    name: signoz-net
    external: true
@@ -1,102 +0,0 @@
receivers:
  hostmetrics:
    collection_interval: 30s
    root_path: /hostfs
    scrapers:
      cpu: {}
      load: {}
      memory: {}
      disk: {}
      filesystem: {}
      network: {}
  otlp:
    protocols:
      grpc:
        endpoint: 0.0.0.0:4317
      http:
        endpoint: 0.0.0.0:4318
  prometheus:
    config:
      global:
        scrape_interval: 60s
      scrape_configs:
        - job_name: otel-agent
          static_configs:
            - targets:
                - localhost:8888
              labels:
                job_name: otel-agent
  tcplog/docker:
    listen_address: "0.0.0.0:2255"
    operators:
      - type: regex_parser
        regex: '^<([0-9]+)>[0-9]+ (?P<timestamp>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?) (?P<container_id>\S+) (?P<container_name>\S+) [0-9]+ - -( (?P<body>.*))?'
        timestamp:
          parse_from: attributes.timestamp
          layout: '%Y-%m-%dT%H:%M:%S.%LZ'
      - type: move
        from: attributes["body"]
        to: body
      - type: remove
        field: attributes.timestamp
      # please remove names from below if you want to collect logs from them
      - type: filter
        id: signoz_logs_filter
        expr: 'attributes.container_name matches "^(signoz_(logspout|signoz|otel-collector|clickhouse|zookeeper))|(infra_(logspout|otel-agent|otel-metrics)).*"'
processors:
  batch:
    send_batch_size: 10000
    send_batch_max_size: 11000
    timeout: 10s
  resourcedetection:
    # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
    detectors:
      # - ec2
      # - gcp
      # - azure
      - env
      - system
    timeout: 2s
extensions:
  health_check:
    endpoint: 0.0.0.0:13133
  pprof:
    endpoint: 0.0.0.0:1777
exporters:
  otlp:
    endpoint: ${env:SIGNOZ_COLLECTOR_ENDPOINT}
    tls:
      insecure: true
    headers:
      signoz-access-token: ${env:SIGNOZ_ACCESS_TOKEN}
  # debug: {}
service:
  telemetry:
    logs:
      encoding: json
    metrics:
      address: 0.0.0.0:8888
  extensions:
    - health_check
    - pprof
  pipelines:
    traces:
      receivers: [otlp]
      processors: [resourcedetection, batch]
      exporters: [otlp]
    metrics:
      receivers: [otlp]
      processors: [resourcedetection, batch]
      exporters: [otlp]
    metrics/hostmetrics:
      receivers: [hostmetrics]
      processors: [resourcedetection, batch]
      exporters: [otlp]
    metrics/prometheus:
      receivers: [prometheus]
      processors: [resourcedetection, batch]
      exporters: [otlp]
    logs:
      receivers: [otlp, tcplog/docker]
      processors: [resourcedetection, batch]
      exporters: [otlp]
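For reference, the tcplog/docker regex_parser above targets the syslog frames that logspout emits. A hedged Go sketch checking a made-up frame against a simplified form of that pattern (the timestamp group is abbreviated to \S+ here):

	package main

	import (
		"fmt"
		"regexp"
	)

	func main() {
		// Simplified from the config: priority, version, timestamp,
		// container id, container name, pid, and an optional body.
		re := regexp.MustCompile(`^<([0-9]+)>[0-9]+ (?P<timestamp>\S+) (?P<container_id>\S+) (?P<container_name>\S+) [0-9]+ - -( (?P<body>.*))?`)
		line := `<14>1 2024-05-01T10:00:00.000Z 0123456789ab signoz-otel-collector 42 - - starting pipeline`
		fmt.Println(re.MatchString(line)) // true
	}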
@@ -1,103 +0,0 @@
receivers:
  prometheus:
    config:
      global:
        scrape_interval: 60s
      scrape_configs:
        - job_name: otel-metrics
          static_configs:
            - targets:
                - localhost:8888
              labels:
                job_name: otel-metrics
        # For Docker daemon metrics to be scraped, it must be configured to expose
        # Prometheus metrics, as documented here: https://docs.docker.com/config/daemon/prometheus/
        # - job_name: docker-daemon
        #   dockerswarm_sd_configs:
        #     - host: unix:///var/run/docker.sock
        #       role: nodes
        #   relabel_configs:
        #     - source_labels: [__meta_dockerswarm_node_address]
        #       target_label: __address__
        #       replacement: $1:9323
        - job_name: "dockerswarm"
          dockerswarm_sd_configs:
            - host: unix:///var/run/docker.sock
              role: tasks
          relabel_configs:
            - action: keep
              regex: running
              source_labels:
                - __meta_dockerswarm_task_desired_state
            - action: keep
              regex: true
              source_labels:
                - __meta_dockerswarm_service_label_signoz_io_scrape
            - regex: ([^:]+)(?::\d+)?
              replacement: $1
              source_labels:
                - __address__
              target_label: swarm_container_ip
            - separator: .
              source_labels:
                - __meta_dockerswarm_service_name
                - __meta_dockerswarm_task_slot
                - __meta_dockerswarm_task_id
              target_label: swarm_container_name
            - target_label: __address__
              source_labels:
                - swarm_container_ip
                - __meta_dockerswarm_service_label_signoz_io_port
              separator: ":"
            - source_labels:
                - __meta_dockerswarm_service_label_signoz_io_path
              target_label: __metrics_path__
            - source_labels:
                - __meta_dockerswarm_service_label_com_docker_stack_namespace
              target_label: namespace
            - source_labels:
                - __meta_dockerswarm_service_name
              target_label: service_name
            - source_labels:
                - __meta_dockerswarm_task_id
              target_label: service_instance_id
            - source_labels:
                - __meta_dockerswarm_node_hostname
              target_label: host_name
processors:
  batch:
    send_batch_size: 10000
    send_batch_max_size: 11000
    timeout: 10s
  resourcedetection:
    detectors:
      - env
      - system
    timeout: 2s
extensions:
  health_check:
    endpoint: 0.0.0.0:13133
  pprof:
    endpoint: 0.0.0.0:1777
exporters:
  otlp:
    endpoint: ${env:SIGNOZ_COLLECTOR_ENDPOINT}
    tls:
      insecure: true
    headers:
      signoz-access-token: ${env:SIGNOZ_ACCESS_TOKEN}
  # debug: {}
service:
  telemetry:
    logs:
      encoding: json
    metrics:
      address: 0.0.0.0:8888
  extensions:
    - health_check
    - pprof
  pipelines:
    metrics:
      receivers: [prometheus]
      processors: [resourcedetection, batch]
      exporters: [otlp]
@@ -1,39 +0,0 @@
version: "3"
x-common: &common
  networks:
    - signoz-net
  extra_hosts:
    - host.docker.internal:host-gateway
  logging:
    options:
      max-size: 50m
      max-file: "3"
  restart: unless-stopped
services:
  hotrod:
    <<: *common
    image: jaegertracing/example-hotrod:1.61.0
    container_name: hotrod
    command: [ "all" ]
    environment:
      - OTEL_EXPORTER_OTLP_ENDPOINT=http://host.docker.internal:4318 # In case of external SigNoz or cloud, update the endpoint and access token
      # - OTEL_OTLP_HEADERS=signoz-access-token=<your-access-token>
  load-hotrod:
    <<: *common
    image: "signoz/locust:1.2.3"
    container_name: load-hotrod
    environment:
      ATTACKED_HOST: http://hotrod:8080
      LOCUST_MODE: standalone
      NO_PROXY: standalone
      TASK_DELAY_FROM: 5
      TASK_DELAY_TO: 30
      QUIET_MODE: "${QUIET_MODE:-false}"
      LOCUST_OPTS: "--headless -u 10 -r 1"
    volumes:
      - ../../../common/locust-scripts:/locust

networks:
  signoz-net:
    name: signoz-net
    external: true
@@ -1,43 +0,0 @@
version: "3"
x-common: &common
  networks:
    - signoz-net
  extra_hosts:
    - host.docker.internal:host-gateway
  logging:
    options:
      max-size: 50m
      max-file: "3"
  restart: unless-stopped
services:
  otel-agent:
    <<: *common
    image: otel/opentelemetry-collector-contrib:0.111.0
    command:
      - --config=/etc/otel-collector-config.yaml
    volumes:
      - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
      - /:/hostfs:ro
      - /var/run/docker.sock:/var/run/docker.sock
    environment:
      - SIGNOZ_COLLECTOR_ENDPOINT=http://host.docker.internal:4317 # In case of external SigNoz or cloud, update the endpoint and access token
      - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux # Replace signoz-host with the actual hostname
      # - SIGNOZ_ACCESS_TOKEN="<your-access-token>"
    # Before exposing the ports, make sure the ports are not used by other services
    # ports:
    #   - "4317:4317"
    #   - "4318:4318"
  logspout:
    <<: *common
    image: "gliderlabs/logspout:v3.2.14"
    volumes:
      - /etc/hostname:/etc/host_hostname:ro
      - /var/run/docker.sock:/var/run/docker.sock
    command: syslog+tcp://otel-agent:2255
    depends_on:
      - otel-agent

networks:
  signoz-net:
    name: signoz-net
    external: true
@@ -1,139 +0,0 @@
receivers:
  hostmetrics:
    collection_interval: 30s
    root_path: /hostfs
    scrapers:
      cpu: {}
      load: {}
      memory: {}
      disk: {}
      filesystem: {}
      network: {}
  otlp:
    protocols:
      grpc:
        endpoint: 0.0.0.0:4317
      http:
        endpoint: 0.0.0.0:4318
  prometheus:
    config:
      global:
        scrape_interval: 60s
      scrape_configs:
        - job_name: otel-collector
          static_configs:
            - targets:
                - localhost:8888
              labels:
                job_name: otel-collector
        # For Docker daemon metrics to be scraped, it must be configured to expose
        # Prometheus metrics, as documented here: https://docs.docker.com/config/daemon/prometheus/
        # - job_name: docker-daemon
        #   static_configs:
        #     - targets:
        #         - host.docker.internal:9323
        #       labels:
        #         job_name: docker-daemon
        - job_name: docker-container
          docker_sd_configs:
            - host: unix:///var/run/docker.sock
          relabel_configs:
            - action: keep
              regex: true
              source_labels:
                - __meta_docker_container_label_signoz_io_scrape
            - regex: true
              source_labels:
                - __meta_docker_container_label_signoz_io_path
              target_label: __metrics_path__
            - regex: (.+)
              source_labels:
                - __meta_docker_container_label_signoz_io_path
              target_label: __metrics_path__
            - separator: ":"
              source_labels:
                - __meta_docker_network_ip
                - __meta_docker_container_label_signoz_io_port
              target_label: __address__
            - regex: '/(.*)'
              replacement: '$1'
              source_labels:
                - __meta_docker_container_name
              target_label: container_name
            - regex: __meta_docker_container_label_signoz_io_(.+)
              action: labelmap
              replacement: $1
  tcplog/docker:
    listen_address: "0.0.0.0:2255"
    operators:
      - type: regex_parser
        regex: '^<([0-9]+)>[0-9]+ (?P<timestamp>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?) (?P<container_id>\S+) (?P<container_name>\S+) [0-9]+ - -( (?P<body>.*))?'
        timestamp:
          parse_from: attributes.timestamp
          layout: '%Y-%m-%dT%H:%M:%S.%LZ'
      - type: move
        from: attributes["body"]
        to: body
      - type: remove
        field: attributes.timestamp
      # please remove names from below if you want to collect logs from them
      - type: filter
        id: signoz_logs_filter
        expr: 'attributes.container_name matches "^signoz|(signoz-(|otel-collector|clickhouse|zookeeper))|(infra-(logspout|otel-agent)-.*)"'
processors:
  batch:
    send_batch_size: 10000
    send_batch_max_size: 11000
    timeout: 10s
  resourcedetection:
    # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
    detectors:
      # - ec2
      # - gcp
      # - azure
      - env
      - system
    timeout: 2s
extensions:
  health_check:
    endpoint: 0.0.0.0:13133
  pprof:
    endpoint: 0.0.0.0:1777
exporters:
  otlp:
    endpoint: ${env:SIGNOZ_COLLECTOR_ENDPOINT}
    tls:
      insecure: true
    headers:
      signoz-access-token: ${env:SIGNOZ_ACCESS_TOKEN}
  # debug: {}
service:
  telemetry:
    logs:
      encoding: json
    metrics:
      address: 0.0.0.0:8888
  extensions:
    - health_check
    - pprof
  pipelines:
    traces:
      receivers: [otlp]
      processors: [resourcedetection, batch]
      exporters: [otlp]
    metrics:
      receivers: [otlp]
      processors: [resourcedetection, batch]
      exporters: [otlp]
    metrics/hostmetrics:
      receivers: [hostmetrics]
      processors: [resourcedetection, batch]
      exporters: [otlp]
    metrics/prometheus:
      receivers: [prometheus]
      processors: [resourcedetection, batch]
      exporters: [otlp]
    logs:
      receivers: [otlp, tcplog/docker]
      processors: [resourcedetection, batch]
      exporters: [otlp]
@@ -3001,68 +3001,6 @@ paths:
      summary: Update auth domain
      tags:
        - authdomains
-  /api/v1/export_raw_data:
-    post:
-      deprecated: false
-      description: This endpoints allows complex query exporting raw data for traces
-        and logs
-      operationId: HandleExportRawDataPOST
-      parameters:
-        - description: The output format for the export.
-          in: query
-          name: format
-          schema:
-            default: csv
-            description: The output format for the export.
-            enum:
-              - csv
-              - jsonl
-            type: string
-      requestBody:
-        content:
-          application/json:
-            schema:
-              $ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
-      responses:
-        "200":
-          content:
-            application/json:
-              schema:
-                type: string
-          description: OK
-        "400":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Bad Request
-        "401":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Unauthorized
-        "403":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Forbidden
-        "500":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Internal Server Error
-      security:
-        - api_key:
-            - VIEWER
-        - tokenizer:
-            - VIEWER
-      summary: Export raw data
-      tags:
-        - logs
-        - traces
  /api/v1/fields/keys:
    get:
      deprecated: false
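Since the export_raw_data path is removed from the spec above, here is a hedged Go sketch of what a direct call to it looked like; the host, API key, and request body are placeholders, and the SIGNOZ-API-KEY header name is taken from the middleware wiring later in this diff:

	// POST /api/v1/export_raw_data?format=csv with a v5 query-range body.
	body := bytes.NewReader(queryRangeJSON) // a Querybuildertypesv5QueryRangeRequest payload (placeholder)
	req, err := http.NewRequest(http.MethodPost, "https://signoz.example.com/api/v1/export_raw_data?format=csv", body)
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("SIGNOZ-API-KEY", apiKey) // api_key security scheme
	resp, err := http.DefaultClient.Do(req)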
@@ -123,7 +123,6 @@ if err := router.Handle("/api/v1/things", handler.New(
		Description:         "This endpoint creates a thing",
		Request:             new(types.PostableThing),
		RequestContentType:  "application/json",
-		RequestQuery:        new(types.QueryableThing),
		Response:            new(types.GettableThing),
		ResponseContentType: "application/json",
		SuccessStatusCode:   http.StatusCreated,
@@ -156,8 +155,6 @@ The `handler.New` function ties the HTTP handler to OpenAPI metadata via `OpenAP
- **Request / RequestContentType**:
  - `Request` is a Go type that describes the request body or form.
  - `RequestContentType` is usually `"application/json"` or `"application/x-www-form-urlencoded"` (for callbacks like SAML).
- **RequestQuery**:
  - `RequestQuery` is a Go type that describes query URL params.
- **RequestExamples**: An array of `handler.OpenAPIExample` that provide concrete request payloads in the generated spec. See [Adding request examples](#adding-request-examples) below.
- **Response / ResponseContentType**:
  - `Response` is the Go type for the successful response payload.
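Put together, a registration using these fields reads roughly like the snippet the first hunk edits. This is a sketch, not the exact API: the handler function name and the shape of the options container are assumptions; the field names are the ones documented above:

	if err := router.Handle("/api/v1/things", handler.New(
		createThing, // the HTTP handler being registered (assumed name)
		handler.OpenAPIOperation{ // container name assumed
			Description:         "This endpoint creates a thing",
			Request:             new(types.PostableThing),
			RequestContentType:  "application/json",
			Response:            new(types.GettableThing),
			ResponseContentType: "application/json",
			SuccessStatusCode:   http.StatusCreated,
		},
	)); err != nil {
		return err
	}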
@@ -2,6 +2,7 @@ package anomaly
import (
	"context"
+	"log/slog"
	"math"
	"time"

@@ -13,7 +14,6 @@ import (
	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
	"github.com/SigNoz/signoz/pkg/types/instrumentationtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
-	"go.uber.org/zap"
)

var (

@@ -67,7 +67,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
		instrumentationtypes.CodeNamespace:    "anomaly",
		instrumentationtypes.CodeFunctionName: "getResults",
	})
-	zap.L().Info("fetching results for current period", zap.Any("currentPeriodQuery", params.CurrentPeriodQuery))
+	slog.InfoContext(ctx, "fetching results for current period", "current_period_query", params.CurrentPeriodQuery)
	currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.CurrentPeriodQuery)
	if err != nil {
		return nil, err

@@ -78,7 +78,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
		return nil, err
	}

-	zap.L().Info("fetching results for past period", zap.Any("pastPeriodQuery", params.PastPeriodQuery))
+	slog.InfoContext(ctx, "fetching results for past period", "past_period_query", params.PastPeriodQuery)
	pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.PastPeriodQuery)
	if err != nil {
		return nil, err

@@ -89,7 +89,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
		return nil, err
	}

-	zap.L().Info("fetching results for current season", zap.Any("currentSeasonQuery", params.CurrentSeasonQuery))
+	slog.InfoContext(ctx, "fetching results for current season", "current_season_query", params.CurrentSeasonQuery)
	currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.CurrentSeasonQuery)
	if err != nil {
		return nil, err

@@ -100,7 +100,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
		return nil, err
	}

-	zap.L().Info("fetching results for past season", zap.Any("pastSeasonQuery", params.PastSeasonQuery))
+	slog.InfoContext(ctx, "fetching results for past season", "past_season_query", params.PastSeasonQuery)
	pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.PastSeasonQuery)
	if err != nil {
		return nil, err

@@ -111,7 +111,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
		return nil, err
	}

-	zap.L().Info("fetching results for past 2 season", zap.Any("past2SeasonQuery", params.Past2SeasonQuery))
+	slog.InfoContext(ctx, "fetching results for past 2 season", "past_2_season_query", params.Past2SeasonQuery)
	past2SeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.Past2SeasonQuery)
	if err != nil {
		return nil, err

@@ -122,7 +122,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
		return nil, err
	}

-	zap.L().Info("fetching results for past 3 season", zap.Any("past3SeasonQuery", params.Past3SeasonQuery))
+	slog.InfoContext(ctx, "fetching results for past 3 season", "past_3_season_query", params.Past3SeasonQuery)
	past3SeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.Past3SeasonQuery)
	if err != nil {
		return nil, err

@@ -235,17 +235,17 @@ func (p *BaseSeasonalProvider) getPredictedSeries(
		if predictedValue < 0 {
			// this should not happen (except when the data has extreme outliers)
			// we will use the moving avg of the previous period series in this case
-			zap.L().Warn("predictedValue is less than 0", zap.Float64("predictedValue", predictedValue), zap.Any("labels", series.Labels))
+			slog.Warn("predicted value is less than 0", "predicted_value", predictedValue, "labels", series.Labels)
			predictedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
		}

-		zap.L().Debug("predictedSeries",
-			zap.Float64("movingAvg", movingAvg),
-			zap.Float64("avg", avg),
-			zap.Float64("mean", mean),
-			zap.Any("labels", series.Labels),
-			zap.Float64("predictedValue", predictedValue),
-			zap.Float64("curr", curr.Value),
+		slog.Debug("predicted series",
+			"moving_avg", movingAvg,
+			"avg", avg,
+			"mean", mean,
+			"labels", series.Labels,
+			"predicted_value", predictedValue,
+			"curr", curr.Value,
		)
		predictedSeries.Points = append(predictedSeries.Points, v3.Point{
			Timestamp: curr.Timestamp,

@@ -418,7 +418,7 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU

	for _, series := range result.Series {
		stdDev := p.getStdDev(series)
-		zap.L().Info("stdDev", zap.Float64("stdDev", stdDev), zap.Any("labels", series.Labels))
+		slog.InfoContext(ctx, "computed standard deviation", "std_dev", stdDev, "labels", series.Labels)

		pastPeriodSeries := p.getMatchingSeries(pastPeriodResult, series)
		currentSeasonSeries := p.getMatchingSeries(currentSeasonResult, series)

@@ -431,7 +431,7 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
		pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
		past2SeasonSeriesAvg := p.getAvg(past2SeasonSeries)
		past3SeasonSeriesAvg := p.getAvg(past3SeasonSeries)
-		zap.L().Info("getAvg", zap.Float64("prevSeriesAvg", prevSeriesAvg), zap.Float64("currentSeasonSeriesAvg", currentSeasonSeriesAvg), zap.Float64("pastSeasonSeriesAvg", pastSeasonSeriesAvg), zap.Float64("past2SeasonSeriesAvg", past2SeasonSeriesAvg), zap.Float64("past3SeasonSeriesAvg", past3SeasonSeriesAvg), zap.Any("labels", series.Labels))
+		slog.InfoContext(ctx, "computed averages", "prev_series_avg", prevSeriesAvg, "current_season_series_avg", currentSeasonSeriesAvg, "past_season_series_avg", pastSeasonSeriesAvg, "past_2_season_series_avg", past2SeasonSeriesAvg, "past_3_season_series_avg", past3SeasonSeriesAvg, "labels", series.Labels)

		predictedSeries := p.getPredictedSeries(
			series,
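Every hunk in this file applies the same mechanical migration: a process-global zap call becomes a context-aware slog call with snake_case keys. The general shape, condensed from the first hunk:

	// Before: global zap logger, camelCase keys, no context.
	zap.L().Info("fetching results for current period",
		zap.Any("currentPeriodQuery", params.CurrentPeriodQuery))

	// After: stdlib log/slog, request context propagated, snake_case keys.
	slog.InfoContext(ctx, "fetching results for current period",
		"current_period_query", params.CurrentPeriodQuery)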
@@ -18,7 +18,7 @@ import (
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/gorilla/mux"
-	"go.uber.org/zap"
+	"log/slog"
)

type CloudIntegrationConnectionParamsResponse struct {

@@ -71,7 +71,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
		// Return the API Key (PAT) even if the rest of the params can not be deduced.
		// Params not returned from here will be requested from the user via form inputs.
		// This enables gracefully degraded but working experience even for non-cloud deployments.
-		zap.L().Info("ingestion params and signoz api url can not be deduced since no license was found")
+		slog.InfoContext(r.Context(), "ingestion params and signoz api url can not be deduced since no license was found")
		ah.Respond(w, result)
		return
	}

@@ -103,7 +103,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
		result.IngestionKey = ingestionKey

	} else {
-		zap.L().Info("ingestion key can't be deduced since no gateway url has been configured")
+		slog.InfoContext(r.Context(), "ingestion key can't be deduced since no gateway url has been configured")
	}

	ah.Respond(w, result)

@@ -138,9 +138,8 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
		}
	}

-	zap.L().Info(
-		"no PAT found for cloud integration, creating a new one",
-		zap.String("cloudProvider", cloudProvider),
+	slog.InfoContext(ctx, "no PAT found for cloud integration, creating a new one",
+		"cloud_provider", cloudProvider,
	)

	newPAT, err := types.NewStorableAPIKey(

@@ -287,9 +286,8 @@ func getOrCreateCloudProviderIngestionKey(
		}
	}

-	zap.L().Info(
-		"no existing ingestion key found for cloud integration, creating a new one",
-		zap.String("cloudProvider", cloudProvider),
+	slog.InfoContext(ctx, "no existing ingestion key found for cloud integration, creating a new one",
+		"cloud_provider", cloudProvider,
	)
	createKeyResult, apiErr := requestGateway[createIngestionKeyResponse](
		ctx, gatewayUrl, licenseKey, "/v1/workspaces/me/keys",
@@ -15,7 +15,7 @@ import (
	"github.com/SigNoz/signoz/pkg/types/featuretypes"
	"github.com/SigNoz/signoz/pkg/types/licensetypes"
	"github.com/SigNoz/signoz/pkg/valuer"
-	"go.uber.org/zap"
+	"log/slog"
)

func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {

@@ -35,23 +35,23 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
	}

	if constants.FetchFeatures == "true" {
-		zap.L().Debug("fetching license")
+		slog.DebugContext(ctx, "fetching license")
		license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
		if err != nil {
-			zap.L().Error("failed to fetch license", zap.Error(err))
+			slog.ErrorContext(ctx, "failed to fetch license", "error", err)
		} else if license == nil {
-			zap.L().Debug("no active license found")
+			slog.DebugContext(ctx, "no active license found")
		} else {
			licenseKey := license.Key

-			zap.L().Debug("fetching zeus features")
+			slog.DebugContext(ctx, "fetching zeus features")
			zeusFeatures, err := fetchZeusFeatures(constants.ZeusFeaturesURL, licenseKey)
			if err == nil {
-				zap.L().Debug("fetched zeus features", zap.Any("features", zeusFeatures))
+				slog.DebugContext(ctx, "fetched zeus features", "features", zeusFeatures)
				// merge featureSet and zeusFeatures in featureSet with higher priority to zeusFeatures
				featureSet = MergeFeatureSets(zeusFeatures, featureSet)
			} else {
-				zap.L().Error("failed to fetch zeus features", zap.Error(err))
+				slog.ErrorContext(ctx, "failed to fetch zeus features", "error", err)
			}
		}
	}
@@ -14,7 +14,7 @@ import (
	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
-	"go.uber.org/zap"
+	"log/slog"
)

func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {

@@ -35,7 +35,7 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
	queryRangeParams, apiErrorObj := baseapp.ParseQueryRangeParams(r)

	if apiErrorObj != nil {
-		zap.L().Error("error parsing metric query range params", zap.Error(apiErrorObj.Err))
+		slog.ErrorContext(r.Context(), "error parsing metric query range params", "error", apiErrorObj.Err)
		RespondError(w, apiErrorObj, nil)
		return
	}

@@ -44,7 +44,7 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
	// add temporality for each metric
	temporalityErr := aH.PopulateTemporality(r.Context(), orgID, queryRangeParams)
	if temporalityErr != nil {
-		zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr))
+		slog.ErrorContext(r.Context(), "error while adding temporality for metrics", "error", temporalityErr)
		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
		return
	}
@@ -47,7 +47,7 @@ import (
	baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
	baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
	"github.com/SigNoz/signoz/pkg/query-service/utils"
-	"go.uber.org/zap"
+	"log/slog"
)

// Server runs HTTP, Mux and a grpc server

@@ -83,6 +83,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
	}

	reader := clickhouseReader.NewReader(
+		signoz.Instrumentation.Logger(),
		signoz.SQLStore,
		signoz.TelemetryStore,
		signoz.Prometheus,

@@ -216,8 +217,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
		}),
		otelmux.WithPublicEndpoint(),
	))
	r.Use(middleware.NewAuthN([]string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Tokenizer, s.signoz.Instrumentation.Logger()).Wrap)
	r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), s.signoz.Sharder).Wrap)
	r.Use(middleware.NewIdentN(s.signoz.IdentNResolver, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap)
	r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(),
		s.config.APIServer.Timeout.ExcludedRoutes,
		s.config.APIServer.Timeout.Default,

@@ -278,7 +278,7 @@ func (s *Server) initListeners() error {
		return err
	}

-	zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.httpHostPort))
+	slog.Info(fmt.Sprintf("Query server started listening on %s...", s.httpHostPort))

	return nil
}

@@ -298,31 +298,31 @@ func (s *Server) Start(ctx context.Context) error {
	}

	go func() {
-		zap.L().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.httpHostPort))
+		slog.Info("Starting HTTP server", "port", httpPort, "addr", s.httpHostPort)

		switch err := s.httpServer.Serve(s.httpConn); err {
		case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
			// normal exit, nothing to do
		default:
-			zap.L().Error("Could not start HTTP server", zap.Error(err))
+			slog.Error("Could not start HTTP server", "error", err)
		}
		s.unavailableChannel <- healthcheck.Unavailable
	}()

	go func() {
-		zap.L().Info("Starting pprof server", zap.String("addr", baseconst.DebugHttpPort))
+		slog.Info("Starting pprof server", "addr", baseconst.DebugHttpPort)

		err = http.ListenAndServe(baseconst.DebugHttpPort, nil)
		if err != nil {
-			zap.L().Error("Could not start pprof server", zap.Error(err))
+			slog.Error("Could not start pprof server", "error", err)
		}
	}()

	go func() {
-		zap.L().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint))
+		slog.Info("Starting OpAmp Websocket server", "addr", baseconst.OpAmpWsEndpoint)
		err := s.opampServer.Start(baseconst.OpAmpWsEndpoint)
		if err != nil {
-			zap.L().Error("opamp ws server failed to start", zap.Error(err))
+			slog.Error("opamp ws server failed to start", "error", err)
			s.unavailableChannel <- healthcheck.Unavailable
		}
	}()

@@ -358,10 +358,9 @@ func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertma
		MetadataStore: metadataStore,
		Prometheus:    prometheus,
		Context:       context.Background(),
-		Logger:        zap.L(),
		Reader:        ch,
		Querier:       querier,
-		SLogger:       providerSettings.Logger,
+		Logger:        providerSettings.Logger,
		Cache:         cache,
		EvalDelay:     baseconst.GetEvalDelay(),
		PrepareTaskFunc: rules.PrepareTaskFunc,

@@ -380,7 +379,7 @@ func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertma
		return nil, fmt.Errorf("rule manager error: %v", err)
	}

-	zap.L().Info("rules manager is ready")
+	slog.Info("rules manager is ready")

	return manager, nil
}
@@ -2,6 +2,7 @@ package rules
import (
	"context"
+	"log/slog"
	"testing"
	"time"

@@ -116,7 +117,7 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {

	telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
	options := clickhouseReader.NewOptions("primaryNamespace")
-	reader := clickhouseReader.NewReader(nil, telemetryStore, nil, "", time.Second, nil, nil, options)
+	reader := clickhouseReader.NewReader(slog.Default(), nil, telemetryStore, nil, "", time.Second, nil, nil, options)

	rule, err := NewAnomalyRule(
		"test-anomaly-rule",

@@ -247,7 +248,7 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {

	telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
	options := clickhouseReader.NewOptions("primaryNamespace")
-	reader := clickhouseReader.NewReader(nil, telemetryStore, nil, "", time.Second, nil, nil, options)
+	reader := clickhouseReader.NewReader(slog.Default(), nil, telemetryStore, nil, "", time.Second, nil, nil, options)

	rule, err := NewAnomalyRule("test-anomaly-rule", valuer.GenerateUUID(), &postableRule, reader, nil, logger, nil)
	require.NoError(t, err)
@@ -13,7 +13,7 @@ import (
	"github.com/SigNoz/signoz/pkg/types/ruletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/google/uuid"
-	"go.uber.org/zap"
+	"log/slog"
)

func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error) {

@@ -34,7 +34,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
		opts.Rule,
		opts.Reader,
		opts.Querier,
-		opts.SLogger,
+		opts.Logger,
		baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
		baserules.WithSQLStore(opts.SQLStore),
		baserules.WithQueryParser(opts.ManagerOpts.QueryParser),

@@ -57,7 +57,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
		ruleId,
		opts.OrgID,
		opts.Rule,
-		opts.SLogger,
+		opts.Logger,
		opts.Reader,
		opts.ManagerOpts.Prometheus,
		baserules.WithSQLStore(opts.SQLStore),

@@ -82,7 +82,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
		opts.Rule,
		opts.Reader,
		opts.Querier,
-		opts.SLogger,
+		opts.Logger,
		opts.Cache,
		baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
		baserules.WithSQLStore(opts.SQLStore),

@@ -142,7 +142,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
		parsedRule,
		opts.Reader,
		opts.Querier,
-		opts.SLogger,
+		opts.Logger,
		baserules.WithSendAlways(),
		baserules.WithSendUnmatched(),
		baserules.WithSQLStore(opts.SQLStore),

@@ -151,7 +151,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
	)

	if err != nil {
-		zap.L().Error("failed to prepare a new threshold rule for test", zap.String("name", alertname), zap.Error(err))
+		slog.Error("failed to prepare a new threshold rule for test", "name", alertname, "error", err)
		return 0, basemodel.BadRequest(err)
	}

@@ -162,7 +162,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
		alertname,
		opts.OrgID,
		parsedRule,
-		opts.SLogger,
+		opts.Logger,
		opts.Reader,
		opts.ManagerOpts.Prometheus,
		baserules.WithSendAlways(),

@@ -173,7 +173,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
	)

	if err != nil {
-		zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", alertname), zap.Error(err))
+		slog.Error("failed to prepare a new promql rule for test", "name", alertname, "error", err)
		return 0, basemodel.BadRequest(err)
	}
	} else if parsedRule.RuleType == ruletypes.RuleTypeAnomaly {

@@ -184,7 +184,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
		parsedRule,
		opts.Reader,
		opts.Querier,
-		opts.SLogger,
+		opts.Logger,
		opts.Cache,
		baserules.WithSendAlways(),
		baserules.WithSendUnmatched(),

@@ -193,7 +193,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
		baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
	)
	if err != nil {
-		zap.L().Error("failed to prepare a new anomaly rule for test", zap.String("name", alertname), zap.Error(err))
+		slog.Error("failed to prepare a new anomaly rule for test", "name", alertname, "error", err)
		return 0, basemodel.BadRequest(err)
	}
	} else {

@@ -205,7 +205,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap

	alertsFound, err := rule.Eval(ctx, ts)
	if err != nil {
-		zap.L().Error("evaluating rule failed", zap.String("rule", rule.Name()), zap.Error(err))
+		slog.Error("evaluating rule failed", "rule", rule.Name(), "error", err)
		return 0, basemodel.InternalError(fmt.Errorf("rule evaluation failed"))
	}
	rule.SendAlerts(ctx, ts, 0, time.Minute, opts.NotifyFunc)
@@ -8,12 +8,12 @@ import (
	"sync/atomic"
	"time"

+	"log/slog"
+
	"github.com/ClickHouse/clickhouse-go/v2"
	"github.com/go-co-op/gocron"
	"github.com/google/uuid"

-	"go.uber.org/zap"
-
	"github.com/SigNoz/signoz/ee/query-service/model"
	"github.com/SigNoz/signoz/pkg/licensing"
	"github.com/SigNoz/signoz/pkg/modules/organization"

@@ -76,19 +76,19 @@ func (lm *Manager) Start(ctx context.Context) error {
func (lm *Manager) UploadUsage(ctx context.Context) {
	organizations, err := lm.orgGetter.ListByOwnedKeyRange(ctx)
	if err != nil {
-		zap.L().Error("failed to get organizations", zap.Error(err))
+		slog.ErrorContext(ctx, "failed to get organizations", "error", err)
		return
	}
	for _, organization := range organizations {
		// check if license is present or not
		license, err := lm.licenseService.GetActive(ctx, organization.ID)
		if err != nil {
-			zap.L().Error("failed to get active license", zap.Error(err))
+			slog.ErrorContext(ctx, "failed to get active license", "error", err)
			return
		}
		if license == nil {
			// we will not start the usage reporting if license is not present.
-			zap.L().Info("no license present, skipping usage reporting")
+			slog.InfoContext(ctx, "no license present, skipping usage reporting")
			return
		}

@@ -115,7 +115,7 @@ func (lm *Manager) UploadUsage(ctx context.Context) {
		dbusages := []model.UsageDB{}
		err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
		if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
-			zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err))
+			slog.ErrorContext(ctx, "failed to get usage from clickhouse", "error", err)
			return
		}
		for _, u := range dbusages {

@@ -125,24 +125,24 @@ func (lm *Manager) UploadUsage(ctx context.Context) {
	}

	if len(usages) <= 0 {
-		zap.L().Info("no snapshots to upload, skipping.")
+		slog.InfoContext(ctx, "no snapshots to upload, skipping")
		return
	}

-	zap.L().Info("uploading usage data")
+	slog.InfoContext(ctx, "uploading usage data")

	usagesPayload := []model.Usage{}
	for _, usage := range usages {
		usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
		if err != nil {
-			zap.L().Error("error while decrypting usage data: %v", zap.Error(err))
+			slog.ErrorContext(ctx, "error while decrypting usage data", "error", err)
			return
		}

		usageData := model.Usage{}
		err = json.Unmarshal(usageDataBytes, &usageData)
		if err != nil {
-			zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err))
+			slog.ErrorContext(ctx, "error while unmarshalling usage data", "error", err)
			return
		}

@@ -163,13 +163,13 @@ func (lm *Manager) UploadUsage(ctx context.Context) {

	body, errv2 := json.Marshal(payload)
	if errv2 != nil {
-		zap.L().Error("error while marshalling usage payload: %v", zap.Error(errv2))
+		slog.ErrorContext(ctx, "error while marshalling usage payload", "error", errv2)
		return
	}

	errv2 = lm.zeus.PutMeters(ctx, payload.LicenseKey.String(), body)
	if errv2 != nil {
-		zap.L().Error("failed to upload usage: %v", zap.Error(errv2))
+		slog.ErrorContext(ctx, "failed to upload usage", "error", errv2)
		// not returning error here since it is captured in the failed count
		return
	}

@@ -179,7 +179,7 @@ func (lm *Manager) UploadUsage(ctx context.Context) {
func (lm *Manager) Stop(ctx context.Context) {
	lm.scheduler.Stop()

-	zap.L().Info("sending usage data before shutting down")
+	slog.InfoContext(ctx, "sending usage data before shutting down")
	// send usage before shutting down
	lm.UploadUsage(ctx)
	atomic.StoreUint32(&locker, stateUnlocked)
@@ -4,6 +4,7 @@ import (
	"context"
	"database/sql"

+	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/sqlschema"
	"github.com/SigNoz/signoz/pkg/sqlstore"

@@ -32,9 +33,9 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
		fmter:    fmter,
		settings: settings,
		operator: sqlschema.NewOperator(fmter, sqlschema.OperatorSupport{
-			DropConstraint:          true,
-			ColumnIfNotExistsExists: true,
-			AlterColumnSetNotNull:   true,
+			SCreateAndDropConstraint:                        true,
+			SAlterTableAddAndDropColumnIfNotExistsAndExists: true,
+			SAlterTableAlterColumnSetAndDrop:                true,
		}),
	}, nil
}

@@ -72,8 +73,9 @@ WHERE
	if err != nil {
		return nil, nil, err
	}

	if len(columns) == 0 {
-		return nil, nil, sql.ErrNoRows
+		return nil, nil, provider.sqlstore.WrapNotFoundErrf(sql.ErrNoRows, errors.CodeNotFound, "table (%s) not found", tableName)
	}

	sqlschemaColumns := make([]*sqlschema.Column, 0)

@@ -220,7 +222,9 @@ SELECT
	ci.relname AS index_name,
	i.indisunique AS unique,
	i.indisprimary AS primary,
-	a.attname AS column_name
+	a.attname AS column_name,
+	array_position(i.indkey, a.attnum) AS column_position,
+	pg_get_expr(i.indpred, i.indrelid) AS predicate
FROM
	pg_index i
	LEFT JOIN pg_class ct ON ct.oid = i.indrelid

@@ -231,9 +235,10 @@ WHERE
	a.attnum = ANY(i.indkey)
	AND con.oid IS NULL
	AND ct.relkind = 'r'
-	AND ct.relname = ?`, string(name))
+	AND ct.relname = ?
+ORDER BY index_name, column_position`, string(name))
	if err != nil {
-		return nil, err
+		return nil, provider.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "no indices for table (%s) found", name)
	}

	defer func() {

@@ -242,7 +247,12 @@ WHERE
		}
	}()

-	uniqueIndicesMap := make(map[string]*sqlschema.UniqueIndex)
+	type indexEntry struct {
+		columns   []sqlschema.ColumnName
+		predicate *string
+	}
+
+	uniqueIndicesMap := make(map[string]*indexEntry)
	for rows.Next() {
		var (
			tableName string

@@ -250,27 +260,53 @@ WHERE
			indexName  string
			unique     bool
			primary    bool
			columnName string
+			// starts from 0 and is unused in this function, this is to ensure that the column names are in the correct order
+			columnPosition int
+			predicate      *string
		)

-		if err := rows.Scan(&tableName, &indexName, &unique, &primary, &columnName); err != nil {
+		if err := rows.Scan(&tableName, &indexName, &unique, &primary, &columnName, &columnPosition, &predicate); err != nil {
			return nil, err
		}

		if unique {
			if _, ok := uniqueIndicesMap[indexName]; !ok {
-				uniqueIndicesMap[indexName] = &sqlschema.UniqueIndex{
-					TableName:   name,
-					ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName(columnName)},
+				uniqueIndicesMap[indexName] = &indexEntry{
+					columns:   []sqlschema.ColumnName{sqlschema.ColumnName(columnName)},
+					predicate: predicate,
				}
			} else {
-				uniqueIndicesMap[indexName].ColumnNames = append(uniqueIndicesMap[indexName].ColumnNames, sqlschema.ColumnName(columnName))
+				uniqueIndicesMap[indexName].columns = append(uniqueIndicesMap[indexName].columns, sqlschema.ColumnName(columnName))
			}
		}
	}

	indices := make([]sqlschema.Index, 0)
-	for _, index := range uniqueIndicesMap {
-		indices = append(indices, index)
+	for indexName, entry := range uniqueIndicesMap {
+		if entry.predicate != nil {
+			index := &sqlschema.PartialUniqueIndex{
+				TableName:   name,
+				ColumnNames: entry.columns,
+				Where:       *entry.predicate,
+			}
+
+			if index.Name() == indexName {
+				indices = append(indices, index)
+			} else {
+				indices = append(indices, index.Named(indexName))
+			}
+		} else {
+			index := &sqlschema.UniqueIndex{
+				TableName:   name,
+				ColumnNames: entry.columns,
+			}
+
+			if index.Name() == indexName {
+				indices = append(indices, index)
+			} else {
+				indices = append(indices, index.Named(indexName))
+			}
+		}
	}

	return indices, nil
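The predicate column added to the query above is what distinguishes a partial unique index. An illustrative example (table, column, and index names are made up) of an index the rewritten scan can now round-trip:

	// Illustrative DDL: unique only among non-deleted rows. The old code
	// ignored pg_get_expr(i.indpred, i.indrelid) and would have read this
	// back as a plain unique index on (email).
	const ddl = `
	CREATE UNIQUE INDEX uq_users_email_active
	    ON users (email)
	    WHERE deleted_at IS NULL`
	// After this change the schema reader yields a *sqlschema.PartialUniqueIndex
	// with Where: "deleted_at IS NULL" instead of a plain *sqlschema.UniqueIndex.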
frontend/__mocks__/resizableMock.tsx (new file): 29 additions
@@ -0,0 +1,29 @@
import { PropsWithChildren } from 'react';

type CommonProps = PropsWithChildren<{
	className?: string;
	minSize?: number;
	maxSize?: number;
	defaultSize?: number;
	direction?: 'horizontal' | 'vertical';
	autoSaveId?: string;
	withHandle?: boolean;
}>;

export function ResizablePanelGroup({
	children,
	className,
}: CommonProps): JSX.Element {
	return <div className={className}>{children}</div>;
}

export function ResizablePanel({
	children,
	className,
}: CommonProps): JSX.Element {
	return <div className={className}>{children}</div>;
}

export function ResizableHandle({ className }: CommonProps): JSX.Element {
	return <div className={className} />;
}
@@ -14,6 +14,7 @@ const config: Config.InitialOptions = {
'\\.(css|less|scss)$': '<rootDir>/__mocks__/cssMock.ts',
'\\.md$': '<rootDir>/__mocks__/cssMock.ts',
'^uplot$': '<rootDir>/__mocks__/uplotMock.ts',
'^@signozhq/resizable$': '<rootDir>/__mocks__/resizableMock.tsx',
'^hooks/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
'^src/hooks/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
'^.*/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
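With this moduleNameMapper entry, any test importing @signozhq/resizable transparently receives the mock above, so layout-heavy components render as plain divs. A minimal sketch of a test relying on that mapping (assuming the repo's usual jest-dom setup):

```tsx
import { render, screen } from '@testing-library/react';
// Jest resolves this import to frontend/__mocks__/resizableMock.tsx via moduleNameMapper.
import { ResizablePanel, ResizablePanelGroup } from '@signozhq/resizable';

it('renders panel content without the real resizable runtime', () => {
	render(
		<ResizablePanelGroup direction="horizontal">
			<ResizablePanel>content</ResizablePanel>
		</ResizablePanelGroup>,
	);
	expect(screen.getByText('content')).toBeInTheDocument();
});
```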
@@ -64,7 +64,7 @@
"@signozhq/sonner": "0.1.0",
"@signozhq/switch": "0.0.2",
"@signozhq/table": "0.3.7",
"@signozhq/toggle-group": "^0.0.1",
"@signozhq/toggle-group": "0.0.1",
"@signozhq/tooltip": "0.0.2",
"@tanstack/react-table": "8.20.6",
"@tanstack/react-virtual": "3.11.2",
@@ -20,113 +20,11 @@ import { useMutation, useQuery } from 'react-query';
import type { BodyType, ErrorType } from '../../../generatedAPIInstance';
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
import type {
HandleExportRawDataPOSTParams,
ListPromotedAndIndexedPaths200,
PromotetypesPromotePathDTO,
Querybuildertypesv5QueryRangeRequestDTO,
RenderErrorResponseDTO,
} from '../sigNoz.schemas';

/**
* This endpoint allows complex queries exporting raw data for traces and logs
* @summary Export raw data
*/
export const handleExportRawDataPOST = (
querybuildertypesv5QueryRangeRequestDTO: BodyType<Querybuildertypesv5QueryRangeRequestDTO>,
params?: HandleExportRawDataPOSTParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<string>({
url: `/api/v1/export_raw_data`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: querybuildertypesv5QueryRangeRequestDTO,
params,
signal,
});
};

export const getHandleExportRawDataPOSTMutationOptions = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
TError,
{
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
params?: HandleExportRawDataPOSTParams;
},
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
TError,
{
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
params?: HandleExportRawDataPOSTParams;
},
TContext
> => {
const mutationKey = ['handleExportRawDataPOST'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };

const mutationFn: MutationFunction<
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
{
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
params?: HandleExportRawDataPOSTParams;
}
> = (props) => {
const { data, params } = props ?? {};

return handleExportRawDataPOST(data, params);
};

return { mutationFn, ...mutationOptions };
};

export type HandleExportRawDataPOSTMutationResult = NonNullable<
Awaited<ReturnType<typeof handleExportRawDataPOST>>
>;
export type HandleExportRawDataPOSTMutationBody = BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
export type HandleExportRawDataPOSTMutationError = ErrorType<RenderErrorResponseDTO>;

/**
* @summary Export raw data
*/
export const useHandleExportRawDataPOST = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
TError,
{
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
params?: HandleExportRawDataPOSTParams;
},
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
TError,
{
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
params?: HandleExportRawDataPOSTParams;
},
TContext
> => {
const mutationOptions = getHandleExportRawDataPOSTMutationOptions(options);

return useMutation(mutationOptions);
};
/**
* This endpoint promotes and indexes paths
* @summary Promote and index paths
@@ -2896,19 +2896,6 @@ export type DeleteAuthDomainPathParameters = {
export type UpdateAuthDomainPathParameters = {
id: string;
};
export type HandleExportRawDataPOSTParams = {
/**
* @enum csv,jsonl
* @type string
* @description The output format for the export.
*/
format?: HandleExportRawDataPOSTFormat;
};

export enum HandleExportRawDataPOSTFormat {
csv = 'csv',
jsonl = 'jsonl',
}
export type GetFieldsKeysParams = {
/**
* @description undefined
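For orientation, a generated mutation hook like useHandleExportRawDataPOST is consumed as an ordinary react-query mutation: the variables object carries the request body under data and the query-string options under params. A minimal consumption sketch, assuming the hook and the format enum above are exported from the generated API layer (the import paths here are hypothetical):

```tsx
import type { Querybuildertypesv5QueryRangeRequestDTO } from '../sigNoz.schemas'; // hypothetical path
import { HandleExportRawDataPOSTFormat, useHandleExportRawDataPOST } from './sigNoz'; // hypothetical path

function ExportButton({ query }: { query: Querybuildertypesv5QueryRangeRequestDTO }): JSX.Element {
	const exportMutation = useHandleExportRawDataPOST();

	return (
		<button
			type="button"
			disabled={exportMutation.isLoading}
			onClick={(): void =>
				exportMutation.mutate({
					data: query,
					params: { format: HandleExportRawDataPOSTFormat.csv },
				})
			}
		>
			Export CSV
		</button>
	);
}
```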
@@ -224,7 +224,7 @@ describe('TimeSeriesPanel utils', () => {
});
});

it('uses DrawStyle.Line and VisibilityMode.Never when series has multiple valid points', () => {
it('uses DrawStyle.Line and showPoints false when series has multiple valid points', () => {
const apiResponse = createApiResponse([
{
metric: {},
@@ -10,9 +10,9 @@ import getLabelName from 'lib/getLabelName';
import { OnClickPluginOpts } from 'lib/uPlotLib/plugins/onClickPlugin';
import {
DrawStyle,
FillMode,
LineInterpolation,
LineStyle,
VisibilityMode,
} from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { isInvalidPlotValue } from 'lib/uPlotV2/utils/dataUtils';
@@ -124,12 +124,12 @@ export const prepareUPlotConfig = ({
label: label,
colorMapping: widget.customLegendColors ?? {},
spanGaps: true,
lineStyle: LineStyle.Solid,
lineInterpolation: LineInterpolation.Spline,
showPoints: hasSingleValidPoint
? VisibilityMode.Always
: VisibilityMode.Never,
lineStyle: widget.lineStyle || LineStyle.Solid,
lineInterpolation: widget.lineInterpolation || LineInterpolation.Spline,
showPoints:
widget.showPoints || hasSingleValidPoint ? true : !!widget.showPoints,
pointSize: 5,
fillMode: widget.fillMode || FillMode.None,
isDarkMode,
});
});
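A note on the new showPoints expression: because || binds tighter than the conditional operator, the else branch is only reached when both operands are already falsy, so the whole expression reduces to a plain boolean OR. A small standalone sketch demonstrating the equivalence over all input combinations:

```ts
function showPointsVerbose(widgetShowPoints: boolean | undefined, hasSingleValidPoint: boolean): boolean {
	// Literal transcription of the expression in the diff.
	return widgetShowPoints || hasSingleValidPoint ? true : !!widgetShowPoints;
}

function showPointsSimple(widgetShowPoints: boolean | undefined, hasSingleValidPoint: boolean): boolean {
	return Boolean(widgetShowPoints || hasSingleValidPoint);
}

// Both agree on every input combination:
for (const a of [true, false, undefined]) {
	for (const b of [true, false]) {
		console.assert(showPointsVerbose(a, b) === showPointsSimple(a, b));
	}
}
```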
@@ -65,6 +65,35 @@
}
}

.new-widget-container {
.resizable-panel-left-container {
overflow-x: hidden;
overflow-y: auto;
}

.resizable-panel-right-container {
overflow-y: auto !important;
min-width: 350px;

&::-webkit-scrollbar {
width: 0.3rem;
}
&::-webkit-scrollbar-thumb {
background: rgb(136, 136, 136);
border-radius: 0.625rem;
}
&::-webkit-scrollbar-track {
background: transparent;
}
}

.widget-resizable-panel-group {
.widget-resizable-handle {
height: 100vh;
}
}
}

.lightMode {
.edit-header {
border-bottom: 1px solid var(--bg-vanilla-300);
@@ -81,4 +110,11 @@
}
}
}

.widget-resizable-panel-group {
.bg-border {
background: var(--bg-vanilla-300);
border-color: var(--bg-vanilla-300);
}
}
}
@@ -0,0 +1,21 @@
.fill-mode-selector {
.fill-mode-icon {
width: 24px;
height: 24px;
}

.fill-mode-label {
text-transform: uppercase;
font-size: 12px;
font-weight: 500;
color: var(--bg-vanilla-400);
}
}

.lightMode {
.fill-mode-selector {
.fill-mode-label {
color: var(--bg-ink-400);
}
}
}
@@ -0,0 +1,94 @@
import { ToggleGroup, ToggleGroupItem } from '@signozhq/toggle-group';
import { Typography } from 'antd';
import { FillMode } from 'lib/uPlotV2/config/types';

import './FillModeSelector.styles.scss';

interface FillModeSelectorProps {
value: FillMode;
onChange: (value: FillMode) => void;
}

export function FillModeSelector({
value,
onChange,
}: FillModeSelectorProps): JSX.Element {
return (
<section className="fill-mode-selector control-container">
<Typography.Text className="section-heading">Fill mode</Typography.Text>
<ToggleGroup
type="single"
value={value}
variant="outline"
size="lg"
onValueChange={(newValue): void => {
if (newValue) {
onChange(newValue as FillMode);
}
}}
>
<ToggleGroupItem value={FillMode.None} aria-label="None" title="None">
<svg
className="fill-mode-icon"
viewBox="0 0 48 48"
fill="none"
stroke="#888"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
>
<rect x="8" y="16" width="32" height="16" stroke="#888" fill="none" />
</svg>
<Typography.Text className="section-heading-small">None</Typography.Text>
</ToggleGroupItem>
<ToggleGroupItem value={FillMode.Solid} aria-label="Solid" title="Solid">
<svg
className="fill-mode-icon"
viewBox="0 0 48 48"
fill="none"
stroke="#888"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
>
<rect x="8" y="16" width="32" height="16" fill="#888" />
</svg>
<Typography.Text className="section-heading-small">Solid</Typography.Text>
</ToggleGroupItem>
<ToggleGroupItem
value={FillMode.Gradient}
aria-label="Gradient"
title="Gradient"
>
<svg
className="fill-mode-icon"
viewBox="0 0 48 48"
fill="none"
stroke="#888"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
>
<defs>
<linearGradient id="fill-gradient" x1="0" y1="0" x2="1" y2="0">
<stop offset="0%" stopColor="#888" stopOpacity="0.2" />
<stop offset="100%" stopColor="#888" stopOpacity="0.8" />
</linearGradient>
</defs>
<rect
x="8"
y="16"
width="32"
height="16"
fill="url(#fill-gradient)"
stroke="#888"
/>
</svg>
<Typography.Text className="section-heading-small">
Gradient
</Typography.Text>
</ToggleGroupItem>
</ToggleGroup>
</section>
);
}
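The selector is fully controlled: it renders whatever value it receives and reports changes through onChange, and the guard in onValueChange keeps the single-select group from being deselected entirely. A minimal usage sketch in a parent settings panel (the component name and import path are hypothetical):

```tsx
import { useState } from 'react';
import { FillMode } from 'lib/uPlotV2/config/types';

import { FillModeSelector } from './FillModeSelector/FillModeSelector'; // hypothetical path

function PanelStyleSettings(): JSX.Element {
	// FillMode.None matches the fallback used by prepareUPlotConfig above.
	const [fillMode, setFillMode] = useState<FillMode>(FillMode.None);

	return <FillModeSelector value={fillMode} onChange={setFillMode} />;
}
```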
@@ -0,0 +1,21 @@
.line-interpolation-selector {
.line-interpolation-icon {
width: 24px;
height: 24px;
}

.line-interpolation-label {
text-transform: uppercase;
font-size: 12px;
font-weight: 500;
color: var(--bg-vanilla-400);
}
}

.lightMode {
.line-interpolation-selector {
.line-interpolation-label {
color: var(--bg-ink-400);
}
}
}
@@ -0,0 +1,110 @@
import { ToggleGroup, ToggleGroupItem } from '@signozhq/toggle-group';
import { Typography } from 'antd';
import { LineInterpolation } from 'lib/uPlotV2/config/types';

import './LineInterpolationSelector.styles.scss';

interface LineInterpolationSelectorProps {
value: LineInterpolation;
onChange: (value: LineInterpolation) => void;
}

export function LineInterpolationSelector({
value,
onChange,
}: LineInterpolationSelectorProps): JSX.Element {
return (
<section className="line-interpolation-selector control-container">
<Typography.Text className="section-heading">
Line interpolation
</Typography.Text>
<ToggleGroup
type="single"
value={value}
variant="outline"
size="lg"
onValueChange={(newValue): void => {
if (newValue) {
onChange(newValue as LineInterpolation);
}
}}
>
<ToggleGroupItem
value={LineInterpolation.Linear}
aria-label="Linear"
title="Linear"
>
<svg
className="line-interpolation-icon"
viewBox="0 0 48 48"
fill="none"
stroke="#888"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
>
<circle cx="8" cy="32" r="3" fill="#888" />
<circle cx="24" cy="16" r="3" fill="#888" />
<circle cx="40" cy="32" r="3" fill="#888" />
<path d="M8 32 L24 16 L40 32" stroke="#888" />
</svg>
</ToggleGroupItem>
<ToggleGroupItem value={LineInterpolation.Spline} aria-label="Spline">
<svg
className="line-interpolation-icon"
viewBox="0 0 48 48"
fill="none"
stroke="#888"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
>
<circle cx="8" cy="32" r="3" fill="#888" />
<circle cx="24" cy="16" r="3" fill="#888" />
<circle cx="40" cy="32" r="3" fill="#888" />
<path d="M8 32 C16 8, 32 8, 40 32" />
</svg>
</ToggleGroupItem>
<ToggleGroupItem
value={LineInterpolation.StepAfter}
aria-label="Step After"
>
<svg
className="line-interpolation-icon"
viewBox="0 0 48 48"
fill="none"
stroke="#888"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
>
<circle cx="8" cy="32" r="3" fill="#888" />
<circle cx="24" cy="16" r="3" fill="#888" />
<circle cx="40" cy="32" r="3" fill="#888" />
<path d="M8 32 V16 H24 V32 H40" />
</svg>
</ToggleGroupItem>

<ToggleGroupItem
value={LineInterpolation.StepBefore}
aria-label="Step Before"
>
<svg
className="line-interpolation-icon"
viewBox="0 0 48 48"
fill="none"
stroke="#888"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
>
<circle cx="8" cy="32" r="3" fill="#888" />
<circle cx="24" cy="16" r="3" fill="#888" />
<circle cx="40" cy="32" r="3" fill="#888" />
<path d="M8 32 H24 V16 H40 V32" />
</svg>
</ToggleGroupItem>
</ToggleGroup>
</section>
);
}
@@ -0,0 +1,21 @@
.line-style-selector {
.line-style-icon {
width: 24px;
height: 24px;
}

.line-style-label {
text-transform: uppercase;
font-size: 12px;
font-weight: 500;
color: var(--bg-vanilla-400);
}
}

.lightMode {
.line-style-selector {
.line-style-label {
color: var(--bg-ink-400);
}
}
}
@@ -0,0 +1,66 @@
import { ToggleGroup, ToggleGroupItem } from '@signozhq/toggle-group';
import { Typography } from 'antd';
import { LineStyle } from 'lib/uPlotV2/config/types';

import './LineStyleSelector.styles.scss';

interface LineStyleSelectorProps {
value: LineStyle;
onChange: (value: LineStyle) => void;
}

export function LineStyleSelector({
value,
onChange,
}: LineStyleSelectorProps): JSX.Element {
return (
<section className="line-style-selector control-container">
<Typography.Text className="section-heading">Line style</Typography.Text>
<ToggleGroup
type="single"
value={value}
variant="outline"
size="lg"
onValueChange={(newValue): void => {
if (newValue) {
onChange(newValue as LineStyle);
}
}}
>
<ToggleGroupItem value={LineStyle.Solid} aria-label="Solid" title="Solid">
<svg
className="line-style-icon"
viewBox="0 0 48 48"
fill="none"
stroke="#888"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
>
<path d="M8 24 L40 24" />
</svg>
<Typography.Text className="section-heading-small">Solid</Typography.Text>
</ToggleGroupItem>
<ToggleGroupItem
value={LineStyle.Dashed}
aria-label="Dashed"
title="Dashed"
>
<svg
className="line-style-icon"
viewBox="0 0 48 48"
fill="none"
stroke="#888"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
strokeDasharray="6 4"
>
<path d="M8 24 L40 24" />
</svg>
<Typography.Text className="section-heading-small">Dashed</Typography.Text>
</ToggleGroupItem>
</ToggleGroup>
</section>
);
}
@@ -1,8 +1,35 @@
.right-container {
display: flex;
flex-direction: column;
font-family: 'Space Mono';
padding-bottom: 48px;

.section-heading {
font-family: 'Space Mono';
color: var(--bg-vanilla-400);
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 138.462% */
letter-spacing: 0.52px;
text-transform: uppercase;
}

.section-heading-small {
font-family: 'Space Mono';
color: var(--bg-vanilla-400);
font-size: 12px;
font-style: normal;
font-weight: 400;
word-break: initial;
line-height: 16px; /* 133.333% */
letter-spacing: 0.48px;
}

.panel-type-select {
width: 100%;
}

.header {
display: flex;
padding: 14px 14px 14px 12px;
@@ -31,74 +58,25 @@
gap: 8px;
}

.name-description {
padding: 0 0 4px 0;

.typography {
color: var(--bg-vanilla-400);
font-family: 'Space Mono';
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 138.462% */
letter-spacing: 0.52px;
text-transform: uppercase;
}

.name-input {
display: flex;
padding: 6px 6px 6px 8px;
align-items: center;
gap: 4px;
flex: 1 0 0;
align-self: stretch;
border-radius: 2px;
border: 1px solid var(--bg-slate-400);
background: var(--bg-ink-300);
color: var(--bg-vanilla-100);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 128.571% */
letter-spacing: -0.07px;
}

.description-input {
border-style: unset;
.ant-input {
display: flex;
height: 80px;
padding: 6px 6px 6px 8px;
align-items: flex-start;
gap: 4px;
border-radius: 2px;
border: 1px solid var(--bg-slate-400);
background: var(--bg-ink-300);
color: var(--bg-vanilla-100);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 128.571% */
letter-spacing: -0.07px;
}
}
}

.panel-config {
display: flex;
flex-direction: column;

.typography {
color: var(--bg-vanilla-400);
font-family: 'Space Mono';
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 138.462% */
letter-spacing: 0.52px;
text-transform: uppercase;
.toggle-card {
display: flex;
flex-direction: row;
justify-content: space-between;
align-items: center;
padding: 12px;
border-radius: 2px;
border: 1px solid var(--bg-slate-400);
background: var(--bg-ink-400);
}

.toggle-card-text-container {
display: flex;
flex-direction: column;
gap: 4px;
}

.panel-type-select {
@@ -114,55 +92,16 @@
border: 1px solid var(--bg-slate-400);
background: var(--bg-ink-300);
}

.select-option {
display: flex;
align-items: center;
gap: 6px;
.icon {
display: flex;
align-items: center;
}

.display {
color: var(--bg-vanilla-100);
font-family: Inter;
font-size: 12px;
font-style: normal;
font-weight: 400;
line-height: 16px; /* 133.333% */
}
}
}

.fill-gaps {
display: flex;
padding: 12px;
justify-content: space-between;
align-items: center;
border-radius: 2px;
border: 1px solid var(--bg-slate-400);
background: var(--bg-ink-400);

.fill-gaps-text {
color: var(--bg-vanilla-400);
font-family: 'Space Mono';
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 138.462% */
letter-spacing: 0.52px;
text-transform: uppercase;
}
.fill-gaps-text-description {
color: var(--bg-vanilla-400);
font-family: Inter;
font-size: 12px;
font-style: normal;
font-weight: 400;
opacity: 0.6;
line-height: 16px; /* 133.333% */
}
.toggle-card-description {
color: var(--bg-vanilla-400);
font-family: Inter;
font-size: 12px;
font-style: normal;
font-weight: 400;
opacity: 0.6;
line-height: 16px; /* 133.333% */
}

.log-scale,
@@ -171,17 +110,6 @@
justify-content: space-between;
}

.panel-time-text {
color: var(--bg-vanilla-400);
font-family: 'Space Mono';
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 138.462% */
letter-spacing: 0.52px;
text-transform: uppercase;
}

.y-axis-unit-selector,
.y-axis-unit-selector-v2 {
display: flex;
@@ -189,14 +117,7 @@
gap: 8px;

.heading {
color: var(--bg-vanilla-400);
font-family: 'Space Mono';
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 138.462% */
letter-spacing: 0.52px;
text-transform: uppercase;
@extend .section-heading;
}

.input {
@@ -249,7 +170,6 @@

.text {
color: var(--bg-vanilla-400);
font-family: 'Space Mono';
font-size: 12px;
font-style: normal;
font-weight: 400;
@@ -270,111 +190,7 @@
.stack-chart {
flex-direction: row;
justify-content: space-between;
.label {
color: var(--bg-vanilla-400);
font-family: 'Space Mono';
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 138.462% */
letter-spacing: 0.52px;
text-transform: uppercase;
}
}

.bucket-config {
.label {
color: var(--bg-vanilla-400);
font-family: 'Space Mono';
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 138.462% */
letter-spacing: 0.52px;
text-transform: uppercase;
}

.bucket-size-label {
margin-top: 8px;
}

.bucket-input {
display: flex;
width: 100%;
height: 32px;
padding: 6px 6px 6px 8px;
align-items: center;
gap: 4px;
align-self: stretch;
border-radius: 2px;
border: 1px solid var(--bg-slate-400);
background: var(--bg-ink-300);

.ant-input {
background: var(--bg-ink-300);
}
}

.combine-hist {
display: flex;
justify-content: space-between;
margin-top: 8px;

.label {
color: var(--bg-vanilla-400);
font-family: 'Space Mono';
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 138.462% */
letter-spacing: 0.52px;
text-transform: uppercase;
}
}
}
}

.alerts {
display: flex;
align-items: center;
justify-content: space-between;
padding: 12px;
min-height: 44px;
border-top: 1px solid var(--bg-slate-500);
cursor: pointer;

.left-section {
display: flex;
align-items: center;
gap: 8px;

.bell-icon {
color: var(--bg-vanilla-400);
}

.alerts-text {
color: var(--bg-vanilla-400);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
letter-spacing: 0.14px;
}
}
.plus-icon {
color: var(--bg-vanilla-400);
}
}

.context-links {
padding: 12px 12px 16px 12px;
border-bottom: 1px solid var(--bg-slate-500);
}

.thresholds-section {
padding: 12px 12px 16px 12px;
border-top: 1px solid var(--bg-slate-500);
}
}

@@ -400,37 +216,16 @@
.lightMode {
.right-container {
background-color: var(--bg-vanilla-100);
.section-heading {
color: var(--bg-ink-400);
}
.header {
.header-text {
color: var(--bg-ink-400);
}
}

.name-description {
.typography {
color: var(--bg-ink-400);
}

.name-input {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-300);
color: var(--bg-ink-300);
}

.description-input {
.ant-input {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-300);
color: var(--bg-ink-300);
}
}
}

.panel-config {
.typography {
color: var(--bg-ink-400);
}

.panel-type-select {
.ant-select-selector {
border: 1px solid var(--bg-vanilla-300);
@@ -455,33 +250,18 @@
}
}

.fill-gaps {
.toggle-card {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-300);

.fill-gaps-text {
color: var(--bg-ink-400);
}
.fill-gaps-text-description {
.toggle-card-description {
color: var(--bg-ink-400);
}
}

.bucket-config {
.label {
color: var(--bg-ink-400);
}

.bucket-input {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-300);

.ant-input {
background: var(--bg-vanilla-300);
}
}
}

.panel-time-text {
color: var(--bg-ink-400);
}
@@ -515,31 +295,6 @@
}
}
}

.alerts {
border-top: 1px solid var(--bg-vanilla-300);

.left-section {
.bell-icon {
color: var(--bg-ink-300);
}

.alerts-text {
color: var(--bg-ink-300);
}
}
.plus-icon {
color: var(--bg-ink-300);
}
}

.context-links {
border-bottom: 1px solid var(--bg-vanilla-300);
}

.thresholds-section {
border-top: 1px solid var(--bg-vanilla-300);
}
}

.select-option {
@@ -0,0 +1,50 @@
.alerts-section {
display: flex;
align-items: center;
justify-content: space-between;
padding: 12px;
min-height: 44px;
border-top: 1px solid var(--bg-slate-500);
cursor: pointer;

.alerts-section__left {
display: flex;
align-items: center;
gap: 8px;

.alerts-section__bell-icon {
color: var(--bg-vanilla-400);
}

.alerts-section__text {
color: var(--bg-vanilla-400);
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
letter-spacing: 0.14px;
}
}
.alerts-section__plus-icon {
color: var(--bg-vanilla-400);
}
}

.lightMode {
.alerts-section {
border-top: 1px solid var(--bg-vanilla-300);

.alerts-section__left {
.alerts-section__bell-icon {
color: var(--bg-ink-300);
}

.alerts-section__text {
color: var(--bg-ink-300);
}
}
.alerts-section__plus-icon {
color: var(--bg-ink-300);
}
}
}
@@ -0,0 +1,23 @@
import { Typography } from 'antd';
import { ConciergeBell, Plus, SquareArrowOutUpRight } from 'lucide-react';

import './AlertsSection.styles.scss';

interface AlertsSectionProps {
onCreateAlertsHandler: () => void;
}

export default function AlertsSection({
onCreateAlertsHandler,
}: AlertsSectionProps): JSX.Element {
return (
<section className="alerts-section" onClick={onCreateAlertsHandler}>
<div className="alerts-section__left">
<ConciergeBell size={14} className="alerts-section__bell-icon" />
<Typography.Text className="alerts-section__text">Alerts</Typography.Text>
<SquareArrowOutUpRight size={10} className="info-icon" />
</div>
<Plus size={14} className="alerts-section__plus-icon" />
</section>
);
}
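One caveat with the component above: a click handler on a bare <section> is invisible to keyboard and screen-reader users. A hedged sketch of an accessible variant, not what the commit ships, just an alternative worth noting:

```tsx
import { KeyboardEvent } from 'react';

// Same contract as AlertsSection above, with keyboard and ARIA support added.
function AccessibleAlertsRow({
	onCreateAlertsHandler,
}: {
	onCreateAlertsHandler: () => void;
}): JSX.Element {
	const onKeyDown = (event: KeyboardEvent<HTMLElement>): void => {
		if (event.key === 'Enter' || event.key === ' ') {
			event.preventDefault();
			onCreateAlertsHandler();
		}
	};

	return (
		<section
			className="alerts-section"
			role="button"
			tabIndex={0}
			onClick={onCreateAlertsHandler}
			onKeyDown={onKeyDown}
		>
			Alerts
		</section>
	);
}
```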
@@ -0,0 +1,98 @@
import { Dispatch, SetStateAction } from 'react';
import { InputNumber, Select, Typography } from 'antd';
import { Axis3D, LineChart, Spline } from 'lucide-react';

import SettingsSection from '../../components/SettingsSection/SettingsSection';

enum LogScale {
LINEAR = 'linear',
LOGARITHMIC = 'logarithmic',
}

const { Option } = Select;

interface AxesSectionProps {
allowSoftMinMax: boolean;
allowLogScale: boolean;
softMin: number | null;
softMax: number | null;
setSoftMin: Dispatch<SetStateAction<number | null>>;
setSoftMax: Dispatch<SetStateAction<number | null>>;
isLogScale: boolean;
setIsLogScale: Dispatch<SetStateAction<boolean>>;
}

export default function AxesSection({
allowSoftMinMax,
allowLogScale,
softMin,
softMax,
setSoftMin,
setSoftMax,
isLogScale,
setIsLogScale,
}: AxesSectionProps): JSX.Element {
const softMinHandler = (value: number | null): void => {
setSoftMin(value);
};

const softMaxHandler = (value: number | null): void => {
setSoftMax(value);
};

return (
<SettingsSection title="Axes" icon={<Axis3D size={14} />}>
{allowSoftMinMax && (
<section className="soft-min-max">
<section className="container">
<Typography.Text className="text">Soft Min</Typography.Text>
<InputNumber
type="number"
value={softMin}
onChange={softMinHandler}
rootClassName="input"
/>
</section>
<section className="container">
<Typography.Text className="text">Soft Max</Typography.Text>
<InputNumber
value={softMax}
type="number"
rootClassName="input"
onChange={softMaxHandler}
/>
</section>
</section>
)}

{allowLogScale && (
<section className="log-scale control-container">
<Typography.Text className="section-heading">Y Axis Scale</Typography.Text>
<Select
onChange={(value): void => setIsLogScale(value === LogScale.LOGARITHMIC)}
value={isLogScale ? LogScale.LOGARITHMIC : LogScale.LINEAR}
className="panel-type-select"
defaultValue={LogScale.LINEAR}
>
<Option value={LogScale.LINEAR}>
<div className="select-option">
<div className="icon">
<LineChart size={16} />
</div>
<Typography.Text className="display">Linear</Typography.Text>
</div>
</Option>
<Option value={LogScale.LOGARITHMIC}>
<div className="select-option">
<div className="icon">
<Spline size={16} />
</div>
<Typography.Text className="display">Logarithmic</Typography.Text>
</div>
</Option>
</Select>
</section>
)}
</SettingsSection>
);
}
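Downstream, the boolean isLogScale ultimately has to be translated into a chart scale. In uPlot's public API a scale's distr option selects the value distribution (1 = linear, 3 = logarithmic) and log sets the base; a minimal sketch of that mapping, with the caveat that the actual wiring in this codebase may differ:

```ts
import type uPlot from 'uplot';

function yScaleOptions(isLogScale: boolean): uPlot.Scale {
	if (isLogScale) {
		// distr: 3 is uPlot's logarithmic distribution; base 10 is the common choice.
		return { distr: 3, log: 10 };
	}
	return { distr: 1 };
}
```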
@@ -0,0 +1,71 @@
import { Dispatch, SetStateAction } from 'react';
import { Switch, Typography } from 'antd';
import {
FillMode,
LineInterpolation,
LineStyle,
} from 'lib/uPlotV2/config/types';
import { Paintbrush } from 'lucide-react';

import { FillModeSelector } from '../../components/FillModeSelector/FillModeSelector';
import { LineInterpolationSelector } from '../../components/LineInterpolationSelector/LineInterpolationSelector';
import { LineStyleSelector } from '../../components/LineStyleSelector/LineStyleSelector';
import SettingsSection from '../../components/SettingsSection/SettingsSection';

interface ChartAppearanceSectionProps {
fillMode: FillMode;
setFillMode: Dispatch<SetStateAction<FillMode>>;
lineStyle: LineStyle;
setLineStyle: Dispatch<SetStateAction<LineStyle>>;
lineInterpolation: LineInterpolation;
setLineInterpolation: Dispatch<SetStateAction<LineInterpolation>>;
showPoints: boolean;
setShowPoints: Dispatch<SetStateAction<boolean>>;
allowFillMode: boolean;
allowLineStyle: boolean;
allowLineInterpolation: boolean;
allowShowPoints: boolean;
}

export default function ChartAppearanceSection({
fillMode,
setFillMode,
lineStyle,
setLineStyle,
lineInterpolation,
setLineInterpolation,
showPoints,
setShowPoints,
allowFillMode,
allowLineStyle,
allowLineInterpolation,
allowShowPoints,
}: ChartAppearanceSectionProps): JSX.Element {
return (
<SettingsSection title="Chart Appearance" icon={<Paintbrush size={14} />}>
{allowFillMode && (
<FillModeSelector value={fillMode} onChange={setFillMode} />
)}
{allowLineStyle && (
<LineStyleSelector value={lineStyle} onChange={setLineStyle} />
)}
{allowLineInterpolation && (
<LineInterpolationSelector
value={lineInterpolation}
onChange={setLineInterpolation}
/>
)}
{allowShowPoints && (
<section className="show-points toggle-card">
<div className="toggle-card-text-container">
<Typography.Text className="section-heading">Show points</Typography.Text>
<Typography.Text className="toggle-card-description">
Display individual data points on the chart
</Typography.Text>
</div>
<Switch size="small" checked={showPoints} onChange={setShowPoints} />
</section>
)}
</SettingsSection>
);
}
@@ -0,0 +1,10 @@
.context-links-section {
padding: 12px 12px 16px 12px;
border-bottom: 1px solid var(--bg-slate-500);
}

.lightMode {
.context-links-section {
border-bottom: 1px solid var(--bg-vanilla-300);
}
}
@@ -0,0 +1,36 @@
import { Dispatch, SetStateAction } from 'react';
import { Link as LinkIcon } from 'lucide-react';
import { ContextLinksData, Widgets } from 'types/api/dashboard/getAll';

import SettingsSection from '../../components/SettingsSection/SettingsSection';
import ContextLinks from '../../ContextLinks';

import './ContextLinksSection.styles.scss';

interface ContextLinksSectionProps {
contextLinks: ContextLinksData;
setContextLinks: Dispatch<SetStateAction<ContextLinksData>>;
selectedWidget?: Widgets;
}

export default function ContextLinksSection({
contextLinks,
setContextLinks,
selectedWidget,
}: ContextLinksSectionProps): JSX.Element {
return (
<SettingsSection
title="Context Links"
icon={<LinkIcon size={14} />}
defaultOpen={!!contextLinks.linksData.length}
>
<div className="context-links-section">
<ContextLinks
contextLinks={contextLinks}
setContextLinks={setContextLinks}
selectedWidget={selectedWidget}
/>
</div>
</SettingsSection>
);
}
@@ -0,0 +1,84 @@
import { Dispatch, SetStateAction } from 'react';
import { Select, Typography } from 'antd';
import { PrecisionOption } from 'components/Graph/types';
import { PanelDisplay } from 'constants/queryBuilder';
import { SlidersHorizontal } from 'lucide-react';
import { ColumnUnit } from 'types/api/dashboard/getAll';

import { ColumnUnitSelector } from '../../ColumnUnitSelector/ColumnUnitSelector';
import SettingsSection from '../../components/SettingsSection/SettingsSection';
import DashboardYAxisUnitSelectorWrapper from '../../DashboardYAxisUnitSelectorWrapper';

interface FormattingUnitsSectionProps {
selectedPanelDisplay: PanelDisplay | '';
yAxisUnit: string;
setYAxisUnit: Dispatch<SetStateAction<string>>;
isNewDashboard: boolean;
decimalPrecision: PrecisionOption;
setDecimalPrecision: Dispatch<SetStateAction<PrecisionOption>>;
columnUnits: ColumnUnit;
setColumnUnits: Dispatch<SetStateAction<ColumnUnit>>;
allowYAxisUnit: boolean;
allowDecimalPrecision: boolean;
allowPanelColumnPreference: boolean;
decimapPrecisionOptions: { label: string; value: PrecisionOption }[];
}

export default function FormattingUnitsSection({
selectedPanelDisplay,
yAxisUnit,
setYAxisUnit,
isNewDashboard,
decimalPrecision,
setDecimalPrecision,
columnUnits,
setColumnUnits,
allowYAxisUnit,
allowDecimalPrecision,
allowPanelColumnPreference,
decimapPrecisionOptions,
}: FormattingUnitsSectionProps): JSX.Element {
return (
<SettingsSection
title="Formatting & Units"
icon={<SlidersHorizontal size={14} />}
>
{allowYAxisUnit && (
<DashboardYAxisUnitSelectorWrapper
onSelect={setYAxisUnit}
value={yAxisUnit || ''}
fieldLabel={
selectedPanelDisplay === PanelDisplay.VALUE ||
selectedPanelDisplay === PanelDisplay.PIE
? 'Unit'
: 'Y Axis Unit'
}
shouldUpdateYAxisUnit={isNewDashboard}
/>
)}

{allowDecimalPrecision && (
<section className="decimal-precision-selector control-container">
<Typography.Text className="section-heading">
Decimal Precision
</Typography.Text>
<Select
options={decimapPrecisionOptions}
value={decimalPrecision}
className="panel-type-select"
defaultValue={decimapPrecisionOptions[0]?.value}
onChange={(val: PrecisionOption): void => setDecimalPrecision(val)}
/>
</section>
)}

{allowPanelColumnPreference && (
<ColumnUnitSelector
columnUnits={columnUnits}
setColumnUnits={setColumnUnits}
isNewDashboard={isNewDashboard}
/>
)}
</SettingsSection>
);
}
@@ -0,0 +1,64 @@
.general-settings__name-description {
padding: 0 0 4px 0;

.general-settings__name-input {
display: flex;
padding: 6px 6px 6px 8px;
align-items: center;
gap: 4px;
flex: 1 0 0;
align-self: stretch;
border-radius: 2px;
border: 1px solid var(--bg-slate-400);
background: var(--bg-ink-300);
color: var(--bg-vanilla-100);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 128.571% */
letter-spacing: -0.07px;
}

.general-settings__description-input {
border-style: unset;
.ant-input {
display: flex;
height: 80px;
padding: 6px 6px 6px 8px;
align-items: flex-start;
gap: 4px;
border-radius: 2px;
border: 1px solid var(--bg-slate-400);
background: var(--bg-ink-300);
color: var(--bg-vanilla-100);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 128.571% */
letter-spacing: -0.07px;
}
}
}

.lightMode {
.general-settings__name-description {
border-top: 1px solid var(--bg-vanilla-300);
border-bottom: 1px solid var(--bg-vanilla-300);

.general-settings__name-input {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-300);
color: var(--bg-ink-300);
}

.general-settings__description-input {
.ant-input {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-300);
color: var(--bg-ink-300);
}
}
}
}
@@ -0,0 +1,156 @@
import {
Dispatch,
SetStateAction,
useCallback,
useMemo,
useRef,
useState,
} from 'react';
import type { InputRef } from 'antd';
import { AutoComplete, Input, Typography } from 'antd';
import { popupContainer } from 'utils/selectPopupContainer';

import SettingsSection from '../../components/SettingsSection/SettingsSection';

import './GeneralSettingsSection.styles.scss';

const { TextArea } = Input;

interface VariableOption {
value: string;
label: string;
}

interface GeneralSettingsSectionProps {
title: string;
setTitle: Dispatch<SetStateAction<string>>;
description: string;
setDescription: Dispatch<SetStateAction<string>>;
dashboardVariables: Record<string, { name?: string }>;
}

export default function GeneralSettingsSection({
title,
setTitle,
description,
setDescription,
dashboardVariables,
}: GeneralSettingsSectionProps): JSX.Element {
const [inputValue, setInputValue] = useState(title);
const [autoCompleteOpen, setAutoCompleteOpen] = useState(false);
const [cursorPos, setCursorPos] = useState(0);
const inputRef = useRef<InputRef>(null);

const onChangeHandler = (
setFunc: Dispatch<SetStateAction<string>>,
value: string,
): void => {
setFunc(value);
};

const dashboardVariableOptions = useMemo<VariableOption[]>(() => {
return Object.entries(dashboardVariables).map(([, value]) => ({
value: value.name || '',
label: value.name || '',
}));
}, [dashboardVariables]);

const updateCursorAndDropdown = useCallback(
(value: string, pos: number): void => {
setCursorPos(pos);
const lastDollar = value.lastIndexOf('$', pos - 1);
setAutoCompleteOpen(lastDollar !== -1 && pos >= lastDollar + 1);
},
[],
);

const onInputChange = useCallback(
(value: string): void => {
setInputValue(value);
onChangeHandler(setTitle, value);
setTimeout(() => {
const pos = inputRef.current?.input?.selectionStart ?? 0;
updateCursorAndDropdown(value, pos);
}, 0);
},
[setTitle, updateCursorAndDropdown],
);

const onSelect = useCallback(
(selectedValue: string): void => {
const pos = cursorPos;
const value = inputValue;
const lastDollar = value.lastIndexOf('$', pos - 1);
const textBeforeDollar = value.substring(0, lastDollar);
const textAfterDollar = value.substring(lastDollar + 1);
const match = textAfterDollar.match(/^([a-zA-Z0-9_.]*)/);
const rest = textAfterDollar.substring(match ? match[1].length : 0);
const newValue = `${textBeforeDollar}$${selectedValue}${rest}`;
setInputValue(newValue);
onChangeHandler(setTitle, newValue);
setAutoCompleteOpen(false);
setTimeout(() => {
const newCursor = `${textBeforeDollar}$${selectedValue}`.length;
inputRef.current?.input?.setSelectionRange(newCursor, newCursor);
setCursorPos(newCursor);
}, 0);
},
[cursorPos, inputValue, setTitle],
);

const filterOption = useCallback(
(currentInputValue: string, option?: VariableOption): boolean => {
const pos = cursorPos;
const value = currentInputValue;
const lastDollar = value.lastIndexOf('$', pos - 1);
if (lastDollar === -1) {
return false;
}
const afterDollar = value.substring(lastDollar + 1, pos).toLowerCase();
return option?.value.toLowerCase().startsWith(afterDollar) || false;
},
[cursorPos],
);

const handleInputCursor = useCallback((): void => {
const pos = inputRef.current?.input?.selectionStart ?? 0;
updateCursorAndDropdown(inputValue, pos);
}, [inputValue, updateCursorAndDropdown]);

return (
<SettingsSection title="General" defaultOpen icon={null}>
<section className="general-settings__name-description control-container">
<Typography.Text className="section-heading">Name</Typography.Text>
<AutoComplete
options={dashboardVariableOptions}
value={inputValue}
onChange={onInputChange}
onSelect={onSelect}
filterOption={filterOption}
getPopupContainer={popupContainer}
placeholder="Enter the panel name here..."
open={autoCompleteOpen}
>
<Input
rootClassName="general-settings__name-input"
ref={inputRef}
onSelect={handleInputCursor}
onClick={handleInputCursor}
onBlur={(): void => setAutoCompleteOpen(false)}
/>
</AutoComplete>
<Typography.Text className="section-heading">Description</Typography.Text>
<TextArea
placeholder="Enter the panel description here..."
bordered
allowClear
value={description}
onChange={(event): void =>
onChangeHandler(setDescription, event.target.value)
}
rootClassName="general-settings__description-input"
/>
</section>
</SettingsSection>
);
}
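The onSelect handler above does some string surgery: it finds the $ nearest the cursor, drops the partially typed variable name after it, and splices in the selected one. The same logic extracted as a pure function for clarity (a sketch, not code from the commit):

```ts
function spliceVariable(value: string, cursorPos: number, selected: string): string {
	const lastDollar = value.lastIndexOf('$', cursorPos - 1);
	const before = value.substring(0, lastDollar);
	const after = value.substring(lastDollar + 1);
	// Drop the partially typed variable name directly after the '$'.
	const match = after.match(/^([a-zA-Z0-9_.]*)/);
	const rest = after.substring(match ? match[1].length : 0);
	return `${before}$${selected}${rest}`;
}

// Example: typing "CPU $ho" with the cursor at the end and picking "host_name"
// yields "CPU $host_name".
console.assert(spliceVariable('CPU $ho', 7, 'host_name') === 'CPU $host_name');
```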
@@ -0,0 +1,55 @@
.histogram-settings__bucket-config {
.histogram-settings__bucket-size-label {
margin-top: 8px;
}

.histogram-settings__bucket-input {
display: flex;
width: 100%;
height: 32px;
padding: 6px 6px 6px 8px;
align-items: center;
gap: 4px;
align-self: stretch;
border-radius: 2px;
border: 1px solid var(--bg-slate-400);
background: var(--bg-ink-300);

.ant-input {
background: var(--bg-ink-300);
}
}

.histogram-settings__combine-hist {
display: flex;
justify-content: space-between;
margin-top: 8px;

.histogram-settings__merge-label {
color: var(--bg-vanilla-400);
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 138.462% */
letter-spacing: 0.52px;
text-transform: uppercase;
}
}
}

.lightMode {
.histogram-settings__bucket-config {
.histogram-settings__merge-label {
color: var(--bg-ink-400);
}

.histogram-settings__bucket-input {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-300);

.ant-input {
background: var(--bg-vanilla-300);
}
}
}
}
@@ -0,0 +1,71 @@
import { Dispatch, SetStateAction } from 'react';
import { InputNumber, Switch, Typography } from 'antd';

import SettingsSection from '../../components/SettingsSection/SettingsSection';

import './HistogramBucketsSection.styles.scss';

interface HistogramBucketsSectionProps {
bucketCount: number;
setBucketCount: Dispatch<SetStateAction<number>>;
bucketWidth: number;
setBucketWidth: Dispatch<SetStateAction<number>>;
combineHistogram: boolean;
setCombineHistogram: Dispatch<SetStateAction<boolean>>;
}

export default function HistogramBucketsSection({
bucketCount,
setBucketCount,
bucketWidth,
setBucketWidth,
combineHistogram,
setCombineHistogram,
}: HistogramBucketsSectionProps): JSX.Element {
return (
<SettingsSection title="Histogram / Buckets">
<section className="histogram-settings__bucket-config control-container">
<Typography.Text className="section-heading">
Number of buckets
</Typography.Text>
<InputNumber
value={bucketCount || null}
type="number"
min={0}
rootClassName="bucket-input"
placeholder="Default: 30"
onChange={(val): void => {
setBucketCount(val || 0);
}}
/>
<Typography.Text className="section-heading histogram-settings__bucket-size-label">
Bucket width
</Typography.Text>
<InputNumber
value={bucketWidth || null}
type="number"
precision={2}
placeholder="Default: Auto"
step={0.1}
min={0.0}
rootClassName="histogram-settings__bucket-input"
onChange={(val): void => {
setBucketWidth(val || 0);
}}
/>
<section className="histogram-settings__combine-hist">
<Typography.Text className="section-heading">
<span className="histogram-settings__merge-label">
Merge all series into one
</span>
</Typography.Text>
<Switch
checked={combineHistogram}
size="small"
onChange={(checked): void => setCombineHistogram(checked)}
/>
</section>
</section>
</SettingsSection>
);
}
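A small subtlety in the handlers above: `val || 0` maps both null (a cleared input) and an explicit 0 to 0, which works here because 0 already means "use the default". If an explicit 0 ever needed to survive as a real value, nullish coalescing would be the natural alternative; a brief sketch of the distinction:

```ts
// `??` only replaces null/undefined; an explicit 0 passes through unchanged.
function normalizeBucketInput(val: number | null): number {
	return val ?? 0;
}

console.assert(normalizeBucketInput(null) === 0);
console.assert(normalizeBucketInput(0) === 0);
console.assert(normalizeBucketInput(25) === 25);
```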
@@ -0,0 +1,72 @@
import { Dispatch, SetStateAction } from 'react';
import type { UseQueryResult } from 'react-query';
import { Select, Typography } from 'antd';
import { Layers } from 'lucide-react';
import { SuccessResponse } from 'types/api';
import { LegendPosition } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';

import SettingsSection from '../../components/SettingsSection/SettingsSection';
import LegendColors from '../../LegendColors/LegendColors';

const { Option } = Select;

interface LegendSectionProps {
allowLegendPosition: boolean;
allowLegendColors: boolean;
legendPosition: LegendPosition;
setLegendPosition: Dispatch<SetStateAction<LegendPosition>>;
customLegendColors: Record<string, string>;
setCustomLegendColors: Dispatch<SetStateAction<Record<string, string>>>;
queryResponse?: UseQueryResult<
SuccessResponse<MetricRangePayloadProps, unknown>,
Error
>;
}

export default function LegendSection({
allowLegendPosition,
allowLegendColors,
legendPosition,
setLegendPosition,
customLegendColors,
setCustomLegendColors,
queryResponse,
}: LegendSectionProps): JSX.Element {
return (
<SettingsSection title="Legend" icon={<Layers size={14} />}>
{allowLegendPosition && (
<section className="legend-position control-container">
<Typography.Text className="section-heading">Position</Typography.Text>
<Select
onChange={(value: LegendPosition): void => setLegendPosition(value)}
value={legendPosition}
className="panel-type-select"
defaultValue={LegendPosition.BOTTOM}
>
<Option value={LegendPosition.BOTTOM}>
<div className="select-option">
<Typography.Text className="display">Bottom</Typography.Text>
</div>
</Option>
<Option value={LegendPosition.RIGHT}>
<div className="select-option">
<Typography.Text className="display">Right</Typography.Text>
</div>
</Option>
</Select>
</section>
)}

{allowLegendColors && (
<section className="legend-colors">
<LegendColors
customLegendColors={customLegendColors}
setCustomLegendColors={setCustomLegendColors}
queryResponse={queryResponse}
/>
</section>
)}
</SettingsSection>
);
}
@@ -0,0 +1,10 @@
.thresholds-section {
padding: 12px 12px 16px 12px;
border-top: 1px solid var(--bg-slate-500);
}

.lightMode {
.thresholds-section {
border-top: 1px solid var(--bg-vanilla-300);
}
}
@@ -0,0 +1,42 @@
import { Dispatch, SetStateAction } from 'react';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { Antenna } from 'lucide-react';
import { ColumnUnit } from 'types/api/dashboard/getAll';

import SettingsSection from '../../components/SettingsSection/SettingsSection';
import ThresholdSelector from '../../Threshold/ThresholdSelector';
import { ThresholdProps } from '../../Threshold/types';

import './ThresholdsSection.styles.scss';

interface ThresholdsSectionProps {
thresholds: ThresholdProps[];
setThresholds: Dispatch<SetStateAction<ThresholdProps[]>>;
yAxisUnit: string;
selectedGraph: PANEL_TYPES;
columnUnits: ColumnUnit;
}

export default function ThresholdsSection({
thresholds,
setThresholds,
yAxisUnit,
selectedGraph,
columnUnits,
}: ThresholdsSectionProps): JSX.Element {
return (
<SettingsSection
title="Thresholds"
icon={<Antenna size={14} />}
defaultOpen={!!thresholds.length}
>
<ThresholdSelector
thresholds={thresholds}
setThresholds={setThresholds}
yAxisUnit={yAxisUnit}
selectedGraph={selectedGraph}
columnUnits={columnUnits}
/>
</SettingsSection>
);
}
@@ -0,0 +1,130 @@
import { Dispatch, SetStateAction, useEffect, useState } from 'react';
import { Select, Switch, Typography } from 'antd';
import TimePreference from 'components/TimePreferenceDropDown';
import { PANEL_TYPES } from 'constants/queryBuilder';
import {
ItemsProps,
PanelTypesWithData,
} from 'container/DashboardContainer/PanelTypeSelectionModal/menuItems';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { LayoutDashboard } from 'lucide-react';
import { DataSource } from 'types/common/queryBuilder';

import SettingsSection from '../../components/SettingsSection/SettingsSection';
import { timePreferance } from '../../timeItems';

const { Option } = Select;

interface VisualizationSettingsSectionProps {
selectedGraph: PANEL_TYPES;
setGraphHandler: (type: PANEL_TYPES) => void;
selectedTime: timePreferance;
setSelectedTime: Dispatch<SetStateAction<timePreferance>>;
stackedBarChart: boolean;
setStackedBarChart: Dispatch<SetStateAction<boolean>>;
isFillSpans: boolean;
setIsFillSpans: Dispatch<SetStateAction<boolean>>;
allowPanelTimePreference: boolean;
allowStackingBarChart: boolean;
allowFillSpans: boolean;
}

export default function VisualizationSettingsSection({
selectedGraph,
setGraphHandler,
selectedTime,
setSelectedTime,
stackedBarChart,
setStackedBarChart,
isFillSpans,
setIsFillSpans,
allowPanelTimePreference,
allowStackingBarChart,
allowFillSpans,
}: VisualizationSettingsSectionProps): JSX.Element {
const { currentQuery } = useQueryBuilder();
const [graphTypes, setGraphTypes] = useState<ItemsProps[]>(PanelTypesWithData);

useEffect(() => {
const queryContainsMetricsDataSource = currentQuery.builder.queryData.some(
(query) => query.dataSource === DataSource.METRICS,
);

if (queryContainsMetricsDataSource) {
setGraphTypes((prev) =>
prev.filter((graph) => graph.name !== PANEL_TYPES.LIST),
);
} else {
setGraphTypes(PanelTypesWithData);
}
}, [currentQuery]);

return (
<SettingsSection
title="Visualization"
defaultOpen
icon={<LayoutDashboard size={14} />}
>
<section className="panel-type control-container">
<Typography.Text className="section-heading">Panel Type</Typography.Text>
<Select
onChange={setGraphHandler}
value={selectedGraph}
className="panel-type-select"
data-testid="panel-change-select"
data-stacking-state={stackedBarChart ? 'true' : 'false'}
>
{graphTypes.map((item) => (
<Option key={item.name} value={item.name}>
<div className="select-option">
<div className="icon">{item.icon}</div>
<Typography.Text className="display">{item.display}</Typography.Text>
</div>
</Option>
))}
</Select>
</section>

{allowPanelTimePreference && (
<section className="panel-time-preference control-container">
<Typography.Text className="section-heading">
Panel Time Preference
</Typography.Text>
<TimePreference
{...{
selectedTime,
setSelectedTime,
}}
/>
</section>
)}

{allowStackingBarChart && (
<section className="stack-chart control-container">
<Typography.Text className="section-heading">Stack series</Typography.Text>
<Switch
checked={stackedBarChart}
size="small"
onChange={(checked): void => setStackedBarChart(checked)}
/>
</section>
)}

{allowFillSpans && (
<section className="fill-gaps toggle-card">
<div className="toggle-card-text-container">
<Typography className="section-heading">Fill gaps</Typography>
<Typography.Text className="toggle-card-description">
Fill gaps in data with 0 for continuity
</Typography.Text>
</div>
<Switch
checked={isFillSpans}
size="small"
onChange={(checked): void => setIsFillSpans(checked)}
/>
</section>
)}
</SettingsSection>
);
}
@@ -6,6 +6,11 @@ import { MemoryRouter } from 'react-router-dom';
import { render as rtlRender, screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { PANEL_TYPES } from 'constants/queryBuilder';
import {
	FillMode,
	LineInterpolation,
	LineStyle,
} from 'lib/uPlotV2/config/types';
import { AppContext } from 'providers/App/App';
import { IAppContext } from 'providers/App/types';
import { ErrorModalProvider } from 'providers/ErrorModalProvider';
@@ -165,6 +170,14 @@ describe('RightContainer - Alerts Section', () => {
		setContextLinks: jest.fn(),
		enableDrillDown: false,
		isNewDashboard: false,
		lineInterpolation: LineInterpolation.Spline,
		fillMode: FillMode.None,
		lineStyle: LineStyle.Solid,
		setLineInterpolation: jest.fn(),
		setFillMode: jest.fn(),
		setLineStyle: jest.fn(),
		showPoints: false,
		setShowPoints: jest.fn(),
	};

	beforeEach(() => {
@@ -176,7 +189,7 @@ describe('RightContainer - Alerts Section', () => {

		const alertsSection = screen.getByText('Alerts').closest('section');
		expect(alertsSection).toBeInTheDocument();
		expect(alertsSection).toHaveClass('alerts');
		expect(alertsSection).toHaveClass('alerts-section');
	});

	it('renders alerts section with correct text and SquareArrowOutUpRight icon', () => {
@@ -0,0 +1,21 @@
.fill-mode-selector {
  .fill-mode-icon {
    width: 24px;
    height: 24px;
  }

  .fill-mode-label {
    text-transform: uppercase;
    font-size: 12px;
    font-weight: 500;
    color: var(--bg-vanilla-400);
  }
}

.lightMode {
  .fill-mode-selector {
    .fill-mode-label {
      color: var(--bg-ink-400);
    }
  }
}
@@ -0,0 +1,94 @@
import { ToggleGroup, ToggleGroupItem } from '@signozhq/toggle-group';
import { Typography } from 'antd';
import { FillMode } from 'lib/uPlotV2/config/types';

import './FillModeSelector.styles.scss';

interface FillModeSelectorProps {
	value: FillMode;
	onChange: (value: FillMode) => void;
}

export function FillModeSelector({
	value,
	onChange,
}: FillModeSelectorProps): JSX.Element {
	return (
		<section className="fill-mode-selector control-container">
			<Typography.Text className="section-heading">Fill mode</Typography.Text>
			<ToggleGroup
				type="single"
				value={value}
				variant="outline"
				size="lg"
				onValueChange={(newValue): void => {
					if (newValue) {
						onChange(newValue as FillMode);
					}
				}}
			>
				<ToggleGroupItem value={FillMode.None} aria-label="None" title="None">
					<svg
						className="fill-mode-icon"
						viewBox="0 0 48 48"
						fill="none"
						stroke="#888"
						strokeWidth="2"
						strokeLinecap="round"
						strokeLinejoin="round"
					>
						<rect x="8" y="16" width="32" height="16" stroke="#888" fill="none" />
					</svg>
					<Typography.Text className="section-heading-small">None</Typography.Text>
				</ToggleGroupItem>
				<ToggleGroupItem value={FillMode.Solid} aria-label="Solid" title="Solid">
					<svg
						className="fill-mode-icon"
						viewBox="0 0 48 48"
						fill="none"
						stroke="#888"
						strokeWidth="2"
						strokeLinecap="round"
						strokeLinejoin="round"
					>
						<rect x="8" y="16" width="32" height="16" fill="#888" />
					</svg>
					<Typography.Text className="section-heading-small">Solid</Typography.Text>
				</ToggleGroupItem>
				<ToggleGroupItem
					value={FillMode.Gradient}
					aria-label="Gradient"
					title="Gradient"
				>
					<svg
						className="fill-mode-icon"
						viewBox="0 0 48 48"
						fill="none"
						stroke="#888"
						strokeWidth="2"
						strokeLinecap="round"
						strokeLinejoin="round"
					>
						<defs>
							<linearGradient id="fill-gradient" x1="0" y1="0" x2="1" y2="0">
								<stop offset="0%" stopColor="#888" stopOpacity="0.2" />
								<stop offset="100%" stopColor="#888" stopOpacity="0.8" />
							</linearGradient>
						</defs>
						<rect
							x="8"
							y="16"
							width="32"
							height="16"
							fill="url(#fill-gradient)"
							stroke="#888"
						/>
					</svg>
					<Typography.Text className="section-heading-small">
						Gradient
					</Typography.Text>
				</ToggleGroupItem>
			</ToggleGroup>
		</section>
	);
}
@@ -0,0 +1,21 @@
.line-interpolation-selector {
  .line-interpolation-icon {
    width: 24px;
    height: 24px;
  }

  .line-interpolation-label {
    text-transform: uppercase;
    font-size: 12px;
    font-weight: 500;
    color: var(--bg-vanilla-400);
  }
}

.lightMode {
  .line-interpolation-selector {
    .line-interpolation-label {
      color: var(--bg-ink-400);
    }
  }
}
@@ -0,0 +1,110 @@
import { ToggleGroup, ToggleGroupItem } from '@signozhq/toggle-group';
import { Typography } from 'antd';
import { LineInterpolation } from 'lib/uPlotV2/config/types';

import './LineInterpolationSelector.styles.scss';

interface LineInterpolationSelectorProps {
	value: LineInterpolation;
	onChange: (value: LineInterpolation) => void;
}

export function LineInterpolationSelector({
	value,
	onChange,
}: LineInterpolationSelectorProps): JSX.Element {
	return (
		<section className="line-interpolation-selector control-container">
			<Typography.Text className="section-heading">
				Line interpolation
			</Typography.Text>
			<ToggleGroup
				type="single"
				value={value}
				variant="outline"
				size="lg"
				onValueChange={(newValue): void => {
					if (newValue) {
						onChange(newValue as LineInterpolation);
					}
				}}
			>
				<ToggleGroupItem
					value={LineInterpolation.Linear}
					aria-label="Linear"
					title="Linear"
				>
					<svg
						className="line-interpolation-icon"
						viewBox="0 0 48 48"
						fill="none"
						stroke="#888"
						strokeWidth="2"
						strokeLinecap="round"
						strokeLinejoin="round"
					>
						<circle cx="8" cy="32" r="3" fill="#888" />
						<circle cx="24" cy="16" r="3" fill="#888" />
						<circle cx="40" cy="32" r="3" fill="#888" />
						<path d="M8 32 L24 16 L40 32" stroke="#888" />
					</svg>
				</ToggleGroupItem>
				<ToggleGroupItem value={LineInterpolation.Spline} aria-label="Spline">
					<svg
						className="line-interpolation-icon"
						viewBox="0 0 48 48"
						fill="none"
						stroke="#888"
						strokeWidth="2"
						strokeLinecap="round"
						strokeLinejoin="round"
					>
						<circle cx="8" cy="32" r="3" fill="#888" />
						<circle cx="24" cy="16" r="3" fill="#888" />
						<circle cx="40" cy="32" r="3" fill="#888" />
						<path d="M8 32 C16 8, 32 8, 40 32" />
					</svg>
				</ToggleGroupItem>
				<ToggleGroupItem
					value={LineInterpolation.StepAfter}
					aria-label="Step After"
				>
					<svg
						className="line-interpolation-icon"
						viewBox="0 0 48 48"
						fill="none"
						stroke="#888"
						strokeWidth="2"
						strokeLinecap="round"
						strokeLinejoin="round"
					>
						<circle cx="8" cy="32" r="3" fill="#888" />
						<circle cx="24" cy="16" r="3" fill="#888" />
						<circle cx="40" cy="32" r="3" fill="#888" />
						<path d="M8 32 V16 H24 V32 H40" />
					</svg>
				</ToggleGroupItem>

				<ToggleGroupItem
					value={LineInterpolation.StepBefore}
					aria-label="Step Before"
				>
					<svg
						className="line-interpolation-icon"
						viewBox="0 0 48 48"
						fill="none"
						stroke="#888"
						strokeWidth="2"
						strokeLinecap="round"
						strokeLinejoin="round"
					>
						<circle cx="8" cy="32" r="3" fill="#888" />
						<circle cx="24" cy="16" r="3" fill="#888" />
						<circle cx="40" cy="32" r="3" fill="#888" />
						<path d="M8 32 H24 V16 H40 V32" />
					</svg>
				</ToggleGroupItem>
			</ToggleGroup>
		</section>
	);
}
@@ -0,0 +1,21 @@
.line-style-selector {
  .line-style-icon {
    width: 24px;
    height: 24px;
  }

  .line-style-label {
    text-transform: uppercase;
    font-size: 12px;
    font-weight: 500;
    color: var(--bg-vanilla-400);
  }
}

.lightMode {
  .line-style-selector {
    .line-style-label {
      color: var(--bg-ink-400);
    }
  }
}
@@ -0,0 +1,66 @@
import { ToggleGroup, ToggleGroupItem } from '@signozhq/toggle-group';
import { Typography } from 'antd';
import { LineStyle } from 'lib/uPlotV2/config/types';

import './LineStyleSelector.styles.scss';

interface LineStyleSelectorProps {
	value: LineStyle;
	onChange: (value: LineStyle) => void;
}

export function LineStyleSelector({
	value,
	onChange,
}: LineStyleSelectorProps): JSX.Element {
	return (
		<section className="line-style-selector control-container">
			<Typography.Text className="section-heading">Line style</Typography.Text>
			<ToggleGroup
				type="single"
				value={value}
				variant="outline"
				size="lg"
				onValueChange={(newValue): void => {
					if (newValue) {
						onChange(newValue as LineStyle);
					}
				}}
			>
				<ToggleGroupItem value={LineStyle.Solid} aria-label="Solid" title="Solid">
					<svg
						className="line-style-icon"
						viewBox="0 0 48 48"
						fill="none"
						stroke="#888"
						strokeWidth="2"
						strokeLinecap="round"
						strokeLinejoin="round"
					>
						<path d="M8 24 L40 24" />
					</svg>
					<Typography.Text className="section-heading-small">Solid</Typography.Text>
				</ToggleGroupItem>
				<ToggleGroupItem
					value={LineStyle.Dashed}
					aria-label="Dashed"
					title="Dashed"
				>
					<svg
						className="line-style-icon"
						viewBox="0 0 48 48"
						fill="none"
						stroke="#888"
						strokeWidth="2"
						strokeLinecap="round"
						strokeLinejoin="round"
						strokeDasharray="6 4"
					>
						<path d="M8 24 L40 24" />
					</svg>
					<Typography.Text className="section-heading-small">Dashed</Typography.Text>
				</ToggleGroupItem>
			</ToggleGroup>
		</section>
	);
}
@@ -43,7 +43,7 @@
  max-height: 0;
  overflow: hidden;
  opacity: 0;
  transition: max-height 0.25s ease, opacity 0.25s ease, padding 0.25s ease;
  transition: max-height 0.1s ease, opacity 0.1s ease, padding 0.1s ease;

  &.open {
    padding-bottom: 24px;
@@ -206,3 +206,59 @@ export const panelTypeVsDecimalPrecision: {
	[PANEL_TYPES.TRACE]: false,
	[PANEL_TYPES.EMPTY_WIDGET]: false,
} as const;

export const panelTypeVsLineInterpolation: {
	[key in PANEL_TYPES]: boolean;
} = {
	[PANEL_TYPES.TIME_SERIES]: true,
	[PANEL_TYPES.VALUE]: false,
	[PANEL_TYPES.TABLE]: false,
	[PANEL_TYPES.LIST]: false,
	[PANEL_TYPES.PIE]: false,
	[PANEL_TYPES.BAR]: false,
	[PANEL_TYPES.HISTOGRAM]: false,
	[PANEL_TYPES.TRACE]: false,
	[PANEL_TYPES.EMPTY_WIDGET]: false,
} as const;

export const panelTypeVsLineStyle: {
	[key in PANEL_TYPES]: boolean;
} = {
	[PANEL_TYPES.TIME_SERIES]: true,
	[PANEL_TYPES.VALUE]: false,
	[PANEL_TYPES.TABLE]: false,
	[PANEL_TYPES.LIST]: false,
	[PANEL_TYPES.PIE]: false,
	[PANEL_TYPES.BAR]: false,
	[PANEL_TYPES.HISTOGRAM]: false,
	[PANEL_TYPES.TRACE]: false,
	[PANEL_TYPES.EMPTY_WIDGET]: false,
} as const;

export const panelTypeVsFillMode: {
	[key in PANEL_TYPES]: boolean;
} = {
	[PANEL_TYPES.TIME_SERIES]: true,
	[PANEL_TYPES.VALUE]: false,
	[PANEL_TYPES.TABLE]: false,
	[PANEL_TYPES.LIST]: false,
	[PANEL_TYPES.PIE]: false,
	[PANEL_TYPES.BAR]: false,
	[PANEL_TYPES.HISTOGRAM]: false,
	[PANEL_TYPES.TRACE]: false,
	[PANEL_TYPES.EMPTY_WIDGET]: false,
} as const;

export const panelTypeVsShowPoints: {
	[key in PANEL_TYPES]: boolean;
} = {
	[PANEL_TYPES.TIME_SERIES]: true,
	[PANEL_TYPES.VALUE]: false,
	[PANEL_TYPES.TABLE]: false,
	[PANEL_TYPES.LIST]: false,
	[PANEL_TYPES.PIE]: false,
	[PANEL_TYPES.BAR]: false,
	[PANEL_TYPES.HISTOGRAM]: false,
	[PANEL_TYPES.TRACE]: false,
	[PANEL_TYPES.EMPTY_WIDGET]: false,
} as const;
@@ -1,46 +1,16 @@
import {
	Dispatch,
	SetStateAction,
	useCallback,
	useEffect,
	useMemo,
	useRef,
	useState,
} from 'react';
import { Dispatch, SetStateAction, useMemo } from 'react';
import { UseQueryResult } from 'react-query';
import type { InputRef } from 'antd';
import {
	AutoComplete,
	Input,
	InputNumber,
	Select,
	Switch,
	Typography,
} from 'antd';
import { Typography } from 'antd';
import { PrecisionOption, PrecisionOptionsEnum } from 'components/Graph/types';
import TimePreference from 'components/TimePreferenceDropDown';
import { PANEL_TYPES, PanelDisplay } from 'constants/queryBuilder';
import {
	ItemsProps,
	PanelTypesWithData,
} from 'container/DashboardContainer/PanelTypeSelectionModal/menuItems';
import { PanelTypesWithData } from 'container/DashboardContainer/PanelTypeSelectionModal/menuItems';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import useCreateAlerts from 'hooks/queryBuilder/useCreateAlerts';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import {
	Antenna,
	Axis3D,
	ConciergeBell,
	Layers,
	LayoutDashboard,
	LineChart,
	Link,
	Pencil,
	Plus,
	SlidersHorizontal,
	Spline,
	SquareArrowOutUpRight,
} from 'lucide-react';
	FillMode,
	LineInterpolation,
	LineStyle,
} from 'lib/uPlotV2/config/types';
import { SuccessResponse } from 'types/api';
import {
	ColumnUnit,
@@ -49,56 +19,55 @@ import {
	Widgets,
} from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { DataSource } from 'types/common/queryBuilder';
import { popupContainer } from 'utils/selectPopupContainer';

import { ColumnUnitSelector } from './ColumnUnitSelector/ColumnUnitSelector';
import SettingsSection from './components/SettingsSection/SettingsSection';
import {
	panelTypeVsBucketConfig,
	panelTypeVsColumnUnitPreferences,
	panelTypeVsContextLinks,
	panelTypeVsCreateAlert,
	panelTypeVsDecimalPrecision,
	panelTypeVsFillMode,
	panelTypeVsFillSpan,
	panelTypeVsLegendColors,
	panelTypeVsLegendPosition,
	panelTypeVsLineInterpolation,
	panelTypeVsLineStyle,
	panelTypeVsLogScale,
	panelTypeVsPanelTimePreferences,
	panelTypeVsShowPoints,
	panelTypeVsSoftMinMax,
	panelTypeVsStackingChartPreferences,
	panelTypeVsThreshold,
	panelTypeVsYAxisUnit,
} from './constants';
import ContextLinks from './ContextLinks';
import DashboardYAxisUnitSelectorWrapper from './DashboardYAxisUnitSelectorWrapper';
import LegendColors from './LegendColors/LegendColors';
import ThresholdSelector from './Threshold/ThresholdSelector';
import AlertsSection from './SettingSections/AlertsSection/AlertsSection';
import AxesSection from './SettingSections/AxesSection/AxesSection';
import ChartAppearanceSection from './SettingSections/ChartAppearanceSection/ChartAppearanceSection';
import ContextLinksSection from './SettingSections/ContextLinksSection/ContextLinksSection';
import FormattingUnitsSection from './SettingSections/FormattingUnitsSection/FormattingUnitsSection';
import GeneralSettingsSection from './SettingSections/GeneralSettingsSection/GeneralSettingsSection';
import HistogramBucketsSection from './SettingSections/HistogramBucketsSection/HistogramBucketsSection';
import LegendSection from './SettingSections/LegendSection/LegendSection';
import ThresholdsSection from './SettingSections/ThresholdsSection/ThresholdsSection';
import VisualizationSettingsSection from './SettingSections/VisualizationSettingsSection/VisualizationSettingsSection';
import { ThresholdProps } from './Threshold/types';
import { timePreferance } from './timeItems';

import './RightContainer.styles.scss';

const { TextArea } = Input;
const { Option } = Select;

enum LogScale {
	LINEAR = 'linear',
	LOGARITHMIC = 'logarithmic',
}

interface VariableOption {
	value: string;
	label: string;
}

// eslint-disable-next-line sonarjs/cognitive-complexity
function RightContainer({
	description,
	setDescription,
	setTitle,
	title,
	selectedGraph,
	lineInterpolation,
	setLineInterpolation,
	fillMode,
	setFillMode,
	lineStyle,
	setLineStyle,
	showPoints,
	setShowPoints,
	bucketCount,
	bucketWidth,
	stackedBarChart,
@@ -138,20 +107,10 @@ function RightContainer({
	isNewDashboard,
}: RightContainerProps): JSX.Element {
	const { dashboardVariables } = useDashboardVariables();
	const [inputValue, setInputValue] = useState(title);
	const [autoCompleteOpen, setAutoCompleteOpen] = useState(false);
	const [cursorPos, setCursorPos] = useState(0);
	const inputRef = useRef<InputRef>(null);

	const onChangeHandler = useCallback(
		(setFunc: Dispatch<SetStateAction<string>>, value: string) => {
			setFunc(value);
		},
		[],
	);

	const selectedGraphType =
		PanelTypesWithData.find((e) => e.name === selectedGraph)?.display || '';
	const selectedPanelDisplay = PanelTypesWithData.find(
		(e) => e.name === selectedGraph,
	)?.display as PanelDisplay;

	const onCreateAlertsHandler = useCreateAlerts(selectedWidget, 'panelView');

@@ -175,16 +134,20 @@ function RightContainer({
		panelTypeVsContextLinks[selectedGraph] && enableDrillDown;
	const allowDecimalPrecision = panelTypeVsDecimalPrecision[selectedGraph];

	const { currentQuery } = useQueryBuilder();
	const allowLineInterpolation = panelTypeVsLineInterpolation[selectedGraph];
	const allowLineStyle = panelTypeVsLineStyle[selectedGraph];
	const allowFillMode = panelTypeVsFillMode[selectedGraph];
	const allowShowPoints = panelTypeVsShowPoints[selectedGraph];

	const [graphTypes, setGraphTypes] = useState<ItemsProps[]>(PanelTypesWithData);

	const dashboardVariableOptions = useMemo<VariableOption[]>(() => {
		return Object.entries(dashboardVariables).map(([, value]) => ({
			value: value.name || '',
			label: value.name || '',
		}));
	}, [dashboardVariables]);
	const decimapPrecisionOptions = useMemo(
		() => [
			{ label: '0 decimals', value: PrecisionOptionsEnum.ZERO },
			{ label: '1 decimal', value: PrecisionOptionsEnum.ONE },
			{ label: '2 decimals', value: PrecisionOptionsEnum.TWO },
			{ label: '3 decimals', value: PrecisionOptionsEnum.THREE },
		],
		[],
	);

	const isAxisSectionVisible = useMemo(() => allowSoftMinMax || allowLogScale, [
		allowSoftMinMax,
@@ -201,94 +164,20 @@ function RightContainer({
		[allowLegendPosition, allowLegendColors],
	);

	const updateCursorAndDropdown = (value: string, pos: number): void => {
		setCursorPos(pos);
		const lastDollar = value.lastIndexOf('$', pos - 1);
		setAutoCompleteOpen(lastDollar !== -1 && pos >= lastDollar + 1);
	};
	const isChartAppearanceSectionVisible = useMemo(
		() =>
			/**
			 * Disabled for now as we are not done with other settings in chart appearance section
			 * TODO: @ahrefabhi Enable this after we are done with other settings in chart appearance section
			 */

	const onInputChange = (value: string): void => {
		setInputValue(value);
		onChangeHandler(setTitle, value);
		setTimeout(() => {
			const pos = inputRef.current?.input?.selectionStart ?? 0;
			updateCursorAndDropdown(value, pos);
		}, 0);
	};

	const decimapPrecisionOptions = useMemo(() => {
		return [
			{ label: '0 decimals', value: PrecisionOptionsEnum.ZERO },
			{ label: '1 decimal', value: PrecisionOptionsEnum.ONE },
			{ label: '2 decimals', value: PrecisionOptionsEnum.TWO },
			{ label: '3 decimals', value: PrecisionOptionsEnum.THREE },
		];
	}, []);

	const handleInputCursor = (): void => {
		const pos = inputRef.current?.input?.selectionStart ?? 0;
		updateCursorAndDropdown(inputValue, pos);
	};

	const onSelect = (selectedValue: string): void => {
		const pos = cursorPos;
		const value = inputValue;
		const lastDollar = value.lastIndexOf('$', pos - 1);
		const textBeforeDollar = value.substring(0, lastDollar);
		const textAfterDollar = value.substring(lastDollar + 1);
		const match = textAfterDollar.match(/^([a-zA-Z0-9_.]*)/);
		const rest = textAfterDollar.substring(match ? match[1].length : 0);
		const newValue = `${textBeforeDollar}$${selectedValue}${rest}`;
		setInputValue(newValue);
		onChangeHandler(setTitle, newValue);
		setAutoCompleteOpen(false);
		setTimeout(() => {
			const newCursor = `${textBeforeDollar}$${selectedValue}`.length;
			inputRef.current?.input?.setSelectionRange(newCursor, newCursor);
			setCursorPos(newCursor);
		}, 0);
	};

	const filterOption = (
		inputValue: string,
		option?: VariableOption,
	): boolean => {
		const pos = cursorPos;
		const value = inputValue;
		const lastDollar = value.lastIndexOf('$', pos - 1);
		if (lastDollar === -1) {
			return false;
		}
		const afterDollar = value.substring(lastDollar + 1, pos).toLowerCase();
		return option?.value.toLowerCase().startsWith(afterDollar) || false;
	};

	useEffect(() => {
		const queryContainsMetricsDataSource = currentQuery.builder.queryData.some(
			(query) => query.dataSource === DataSource.METRICS,
		);

		if (queryContainsMetricsDataSource) {
			setGraphTypes((prev) =>
				prev.filter((graph) => graph.name !== PANEL_TYPES.LIST),
			);
		} else {
			setGraphTypes(PanelTypesWithData);
		}
	}, [currentQuery]);

	const softMinHandler = useCallback(
		(value: number | null) => {
			setSoftMin(value);
		},
		[setSoftMin],
	);

	const softMaxHandler = useCallback(
		(value: number | null) => {
			setSoftMax(value);
		},
		[setSoftMax],
		// eslint-disable-next-line sonarjs/no-redundant-boolean
		false &&
			(allowFillMode ||
				allowLineStyle ||
				allowLineInterpolation ||
				allowShowPoints),
		[allowFillMode, allowLineStyle, allowLineInterpolation, allowShowPoints],
	);

	return (
@@ -298,336 +187,120 @@ function RightContainer({
				<Typography.Text className="header-text">Panel Settings</Typography.Text>
			</section>

			<SettingsSection title="General" defaultOpen icon={<Pencil size={14} />}>
				<section className="name-description control-container">
					<Typography.Text className="typography">Name</Typography.Text>
					<AutoComplete
						options={dashboardVariableOptions}
						value={inputValue}
						onChange={onInputChange}
						onSelect={onSelect}
						filterOption={filterOption}
						style={{ width: '100%' }}
						getPopupContainer={popupContainer}
						placeholder="Enter the panel name here..."
						open={autoCompleteOpen}
					>
						<Input
							rootClassName="name-input"
							ref={inputRef}
							onSelect={handleInputCursor}
							onClick={handleInputCursor}
							onBlur={(): void => setAutoCompleteOpen(false)}
						/>
					</AutoComplete>
					<Typography.Text className="typography">Description</Typography.Text>
					<TextArea
						placeholder="Enter the panel description here..."
						bordered
						allowClear
						value={description}
						onChange={(event): void =>
							onChangeHandler(setDescription, event.target.value)
						}
						rootClassName="description-input"
					/>
				</section>
			</SettingsSection>
			<GeneralSettingsSection
				title={title}
				setTitle={setTitle}
				description={description}
				setDescription={setDescription}
				dashboardVariables={dashboardVariables}
			/>

			<section className="panel-config">
				<SettingsSection
					title="Visualization"
					defaultOpen
					icon={<LayoutDashboard size={14} />}
				>
					<section className="panel-type control-container">
						<Typography.Text className="typography">Panel Type</Typography.Text>
						<Select
							onChange={setGraphHandler}
							value={selectedGraph}
							className="panel-type-select"
							data-testid="panel-change-select"
							data-stacking-state={stackedBarChart ? 'true' : 'false'}
						>
							{graphTypes.map((item) => (
								<Option key={item.name} value={item.name}>
									<div className="select-option">
										<div className="icon">{item.icon}</div>
										<Typography.Text className="display">{item.display}</Typography.Text>
									</div>
								</Option>
							))}
						</Select>
					</section>

					{allowPanelTimePreference && (
						<section className="panel-time-preference control-container">
							<Typography.Text className="panel-time-text">
								Panel Time Preference
							</Typography.Text>
							<TimePreference
								{...{
									selectedTime,
									setSelectedTime,
								}}
							/>
						</section>
					)}

					{allowStackingBarChart && (
						<section className="stack-chart control-container">
							<Typography.Text className="label">Stack series</Typography.Text>
							<Switch
								checked={stackedBarChart}
								size="small"
								onChange={(checked): void => setStackedBarChart(checked)}
							/>
						</section>
					)}

					{allowFillSpans && (
						<section className="fill-gaps">
							<div className="fill-gaps-text-container">
								<Typography className="fill-gaps-text">Fill gaps</Typography>
								<Typography.Text className="fill-gaps-text-description">
									Fill gaps in data with 0 for continuity
								</Typography.Text>
							</div>
							<Switch
								checked={isFillSpans}
								size="small"
								onChange={(checked): void => setIsFillSpans(checked)}
							/>
						</section>
					)}
				</SettingsSection>
				<VisualizationSettingsSection
					selectedGraph={selectedGraph}
					setGraphHandler={setGraphHandler}
					selectedTime={selectedTime}
					setSelectedTime={setSelectedTime}
					stackedBarChart={stackedBarChart}
					setStackedBarChart={setStackedBarChart}
					isFillSpans={isFillSpans}
					setIsFillSpans={setIsFillSpans}
					allowPanelTimePreference={allowPanelTimePreference}
					allowStackingBarChart={allowStackingBarChart}
					allowFillSpans={allowFillSpans}
				/>

				{isFormattingSectionVisible && (
					<SettingsSection
						title="Formatting & Units"
						icon={<SlidersHorizontal size={14} />}
					>
						{allowYAxisUnit && (
							<DashboardYAxisUnitSelectorWrapper
								onSelect={setYAxisUnit}
								value={yAxisUnit || ''}
								fieldLabel={
									selectedGraphType === PanelDisplay.VALUE ||
									selectedGraphType === PanelDisplay.PIE
										? 'Unit'
										: 'Y Axis Unit'
								}
								// Only update the y-axis unit value automatically in create mode
								shouldUpdateYAxisUnit={isNewDashboard}
							/>
						)}
					<FormattingUnitsSection
						selectedPanelDisplay={selectedPanelDisplay}
						yAxisUnit={yAxisUnit}
						setYAxisUnit={setYAxisUnit}
						isNewDashboard={isNewDashboard}
						decimalPrecision={decimalPrecision}
						setDecimalPrecision={setDecimalPrecision}
						columnUnits={columnUnits}
						setColumnUnits={setColumnUnits}
						allowYAxisUnit={allowYAxisUnit}
						allowDecimalPrecision={allowDecimalPrecision}
						allowPanelColumnPreference={allowPanelColumnPreference}
						decimapPrecisionOptions={decimapPrecisionOptions}
					/>
				)}

				{allowDecimalPrecision && (
					<section className="decimal-precision-selector control-container">
						<Typography.Text className="typography">
							Decimal Precision
						</Typography.Text>
						<Select
							options={decimapPrecisionOptions}
							value={decimalPrecision}
							style={{ width: '100%' }}
							className="panel-type-select"
							defaultValue={PrecisionOptionsEnum.TWO}
							onChange={(val: PrecisionOption): void => setDecimalPrecision(val)}
						/>
					</section>
				)}

				{allowPanelColumnPreference && (
					<ColumnUnitSelector
						columnUnits={columnUnits}
						setColumnUnits={setColumnUnits}
						isNewDashboard={isNewDashboard}
					/>
				)}
				</SettingsSection>
				{isChartAppearanceSectionVisible && (
					<ChartAppearanceSection
						fillMode={fillMode}
						setFillMode={setFillMode}
						lineStyle={lineStyle}
						setLineStyle={setLineStyle}
						lineInterpolation={lineInterpolation}
						setLineInterpolation={setLineInterpolation}
						showPoints={showPoints}
						setShowPoints={setShowPoints}
						allowFillMode={allowFillMode}
						allowLineStyle={allowLineStyle}
						allowLineInterpolation={allowLineInterpolation}
						allowShowPoints={allowShowPoints}
					/>
				)}

				{isAxisSectionVisible && (
					<SettingsSection title="Axes" icon={<Axis3D size={14} />}>
						{allowSoftMinMax && (
							<section className="soft-min-max">
								<section className="container">
									<Typography.Text className="text">Soft Min</Typography.Text>
									<InputNumber
										type="number"
										value={softMin}
										onChange={softMinHandler}
										rootClassName="input"
									/>
								</section>
								<section className="container">
									<Typography.Text className="text">Soft Max</Typography.Text>
									<InputNumber
										value={softMax}
										type="number"
										rootClassName="input"
										onChange={softMaxHandler}
									/>
								</section>
							</section>
						)}

						{allowLogScale && (
							<section className="log-scale control-container">
								<Typography.Text className="typography">Y Axis Scale</Typography.Text>
								<Select
									onChange={(value): void =>
										setIsLogScale(value === LogScale.LOGARITHMIC)
									}
									value={isLogScale ? LogScale.LOGARITHMIC : LogScale.LINEAR}
									style={{ width: '100%' }}
									className="panel-type-select"
									defaultValue={LogScale.LINEAR}
								>
									<Option value={LogScale.LINEAR}>
										<div className="select-option">
											<div className="icon">
												<LineChart size={16} />
											</div>
											<Typography.Text className="display">Linear</Typography.Text>
										</div>
									</Option>
									<Option value={LogScale.LOGARITHMIC}>
										<div className="select-option">
											<div className="icon">
												<Spline size={16} />
											</div>
											<Typography.Text className="display">Logarithmic</Typography.Text>
										</div>
									</Option>
								</Select>
							</section>
						)}
					</SettingsSection>
					<AxesSection
						allowSoftMinMax={allowSoftMinMax}
						allowLogScale={allowLogScale}
						softMin={softMin}
						softMax={softMax}
						setSoftMin={setSoftMin}
						setSoftMax={setSoftMax}
						isLogScale={isLogScale}
						setIsLogScale={setIsLogScale}
					/>
				)}

				{isLegendSectionVisible && (
					<SettingsSection title="Legend" icon={<Layers size={14} />}>
						{allowLegendPosition && (
							<section className="legend-position control-container">
								<Typography.Text className="typography">Position</Typography.Text>
								<Select
									onChange={(value: LegendPosition): void => setLegendPosition(value)}
									value={legendPosition}
									style={{ width: '100%' }}
									className="panel-type-select"
									defaultValue={LegendPosition.BOTTOM}
								>
									<Option value={LegendPosition.BOTTOM}>
										<div className="select-option">
											<Typography.Text className="display">Bottom</Typography.Text>
										</div>
									</Option>
									<Option value={LegendPosition.RIGHT}>
										<div className="select-option">
											<Typography.Text className="display">Right</Typography.Text>
										</div>
									</Option>
								</Select>
							</section>
						)}

						{allowLegendColors && (
							<section className="legend-colors">
								<LegendColors
									customLegendColors={customLegendColors}
									setCustomLegendColors={setCustomLegendColors}
									queryResponse={queryResponse}
								/>
							</section>
						)}
					</SettingsSection>
					<LegendSection
						allowLegendPosition={allowLegendPosition}
						allowLegendColors={allowLegendColors}
						legendPosition={legendPosition}
						setLegendPosition={setLegendPosition}
						customLegendColors={customLegendColors}
						setCustomLegendColors={setCustomLegendColors}
						queryResponse={queryResponse}
					/>
				)}

				{allowBucketConfig && (
					<SettingsSection title="Histogram / Buckets">
						<section className="bucket-config control-container">
							<Typography.Text className="label">Number of buckets</Typography.Text>
							<InputNumber
								value={bucketCount || null}
								type="number"
								min={0}
								rootClassName="bucket-input"
								placeholder="Default: 30"
								onChange={(val): void => {
									setBucketCount(val || 0);
								}}
							/>
							<Typography.Text className="label bucket-size-label">
								Bucket width
							</Typography.Text>
							<InputNumber
								value={bucketWidth || null}
								type="number"
								precision={2}
								placeholder="Default: Auto"
								step={0.1}
								min={0.0}
								rootClassName="bucket-input"
								onChange={(val): void => {
									setBucketWidth(val || 0);
								}}
							/>
							<section className="combine-hist">
								<Typography.Text className="label">
									Merge all series into one
								</Typography.Text>
								<Switch
									checked={combineHistogram}
									size="small"
									onChange={(checked): void => setCombineHistogram(checked)}
								/>
							</section>
						</section>
					</SettingsSection>
					<HistogramBucketsSection
						bucketCount={bucketCount}
						setBucketCount={setBucketCount}
						bucketWidth={bucketWidth}
						setBucketWidth={setBucketWidth}
						combineHistogram={combineHistogram}
						setCombineHistogram={setCombineHistogram}
					/>
				)}
			</section>

			{allowCreateAlerts && (
				<section className="alerts" onClick={onCreateAlertsHandler}>
					<div className="left-section">
						<ConciergeBell size={14} className="bell-icon" />
						<Typography.Text className="alerts-text">Alerts</Typography.Text>
						<SquareArrowOutUpRight size={10} className="info-icon" />
					</div>
					<Plus size={14} className="plus-icon" />
				</section>
				<AlertsSection onCreateAlertsHandler={onCreateAlertsHandler} />
			)}

			{allowContextLinks && (
				<SettingsSection
					title="Context Links"
					icon={<Link size={14} />}
					defaultOpen={!!contextLinks.linksData.length}
				>
					<ContextLinks
						contextLinks={contextLinks}
						setContextLinks={setContextLinks}
						selectedWidget={selectedWidget}
					/>
				</SettingsSection>
				<ContextLinksSection
					contextLinks={contextLinks}
					setContextLinks={setContextLinks}
					selectedWidget={selectedWidget}
				/>
			)}

			{allowThreshold && (
				<SettingsSection
					title="Thresholds"
					icon={<Antenna size={14} />}
					defaultOpen={!!thresholds.length}
				>
					<ThresholdSelector
						thresholds={thresholds}
						setThresholds={setThresholds}
						yAxisUnit={yAxisUnit}
						selectedGraph={selectedGraph}
						columnUnits={columnUnits}
					/>
				</SettingsSection>
				<ThresholdsSection
					thresholds={thresholds}
					setThresholds={setThresholds}
					yAxisUnit={yAxisUnit}
					selectedGraph={selectedGraph}
					columnUnits={columnUnits}
				/>
			)}
		</div>
	);
@@ -683,6 +356,14 @@ export interface RightContainerProps {
	setContextLinks: Dispatch<SetStateAction<ContextLinksData>>;
	enableDrillDown?: boolean;
	isNewDashboard: boolean;
	lineInterpolation: LineInterpolation;
	setLineInterpolation: Dispatch<SetStateAction<LineInterpolation>>;
	fillMode: FillMode;
	setFillMode: Dispatch<SetStateAction<FillMode>>;
	lineStyle: LineStyle;
	setLineStyle: Dispatch<SetStateAction<LineStyle>>;
	showPoints: boolean;
	setShowPoints: Dispatch<SetStateAction<boolean>>;
}

RightContainer.defaultProps = {
@@ -6,6 +6,11 @@ import { UseQueryResult } from 'react-query';
import { useSelector } from 'react-redux';
import { generatePath } from 'react-router-dom';
import { WarningOutlined } from '@ant-design/icons';
import {
	ResizableHandle,
	ResizablePanel,
	ResizablePanelGroup,
} from '@signozhq/resizable';
import { Button, Flex, Modal, Space, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { PrecisionOption, PrecisionOptionsEnum } from 'components/Graph/types';
@@ -24,12 +29,16 @@ import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import createQueryParams from 'lib/createQueryParams';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import {
	FillMode,
	LineInterpolation,
	LineStyle,
} from 'lib/uPlotV2/config/types';
import { cloneDeep, defaultTo, isEmpty, isUndefined } from 'lodash-es';
import { Check, X } from 'lucide-react';
import { useScrollToWidgetIdStore } from 'providers/Dashboard/helpers/scrollToWidgetIdHelper';
@@ -63,12 +72,7 @@ import QueryTypeTag from './LeftContainer/QueryTypeTag';
import RightContainer from './RightContainer';
import { ThresholdProps } from './RightContainer/Threshold/types';
import TimeItems, { timePreferance } from './RightContainer/timeItems';
import {
	Container,
	LeftContainerWrapper,
	PanelContainer,
	RightContainerWrapper,
} from './styles';
import { Container, PanelContainer } from './styles';
import { NewWidgetProps } from './types';
import {
	getDefaultWidgetData,
@@ -204,6 +208,18 @@ function NewWidget({
	const [legendPosition, setLegendPosition] = useState<LegendPosition>(
		selectedWidget?.legendPosition || LegendPosition.BOTTOM,
	);
	const [lineInterpolation, setLineInterpolation] = useState<LineInterpolation>(
		selectedWidget?.lineInterpolation || LineInterpolation.Spline,
	);
	const [fillMode, setFillMode] = useState<FillMode>(
		selectedWidget?.fillMode || FillMode.None,
	);
	const [lineStyle, setLineStyle] = useState<LineStyle>(
		selectedWidget?.lineStyle || LineStyle.Solid,
	);
	const [showPoints, setShowPoints] = useState<boolean>(
		selectedWidget?.showPoints ?? false,
	);
	const [customLegendColors, setCustomLegendColors] = useState<
		Record<string, string>
	>(selectedWidget?.customLegendColors || {});
@@ -269,6 +285,10 @@ function NewWidget({
	softMin,
	softMax,
	fillSpans: isFillSpans,
	lineInterpolation,
	fillMode,
	lineStyle,
	showPoints,
	columnUnits,
	bucketCount,
	stackedBarChart,
@@ -304,6 +324,10 @@ function NewWidget({
	stackedBarChart,
	isLogScale,
	legendPosition,
	lineInterpolation,
	fillMode,
	lineStyle,
	showPoints,
	customLegendColors,
	contextLinks,
	selectedWidget.columnWidths,
@@ -757,7 +781,7 @@ function NewWidget({
	}, [query, safeNavigate, dashboardId, currentQuery]);

	return (
		<Container>
		<Container className="new-widget-container">
			<div className="edit-header">
				<div className="left-header">
					<X
@@ -811,79 +835,102 @@ function NewWidget({
			</div>

			<PanelContainer>
				<LeftContainerWrapper isDarkMode={useIsDarkMode()}>
					<OverlayScrollbar>
						{selectedWidget && (
							<LeftContainer
								selectedGraph={graphType}
								selectedLogFields={selectedLogFields}
								setSelectedLogFields={setSelectedLogFields}
								selectedTracesFields={selectedTracesFields}
								setSelectedTracesFields={setSelectedTracesFields}
								selectedWidget={selectedWidget}
								selectedTime={selectedTime}
								requestData={requestData}
								setRequestData={setRequestData}
								isLoadingPanelData={isLoadingPanelData}
								setQueryResponse={setQueryResponse}
								enableDrillDown={enableDrillDown}
								selectedDashboard={selectedDashboard}
								isNewPanel={isNewPanel}
							/>
						)}
					</OverlayScrollbar>
				</LeftContainerWrapper>

				<RightContainerWrapper>
					<RightContainer
						setGraphHandler={setGraphHandler}
						title={title}
						setTitle={setTitle}
						description={description}
						setDescription={setDescription}
						stackedBarChart={stackedBarChart}
						setStackedBarChart={setStackedBarChart}
						opacity={opacity}
						yAxisUnit={yAxisUnit}
						columnUnits={columnUnits}
						setColumnUnits={setColumnUnits}
						bucketCount={bucketCount}
						bucketWidth={bucketWidth}
						combineHistogram={combineHistogram}
						setCombineHistogram={setCombineHistogram}
						setBucketWidth={setBucketWidth}
						setBucketCount={setBucketCount}
						setOpacity={setOpacity}
						selectedNullZeroValue={selectedNullZeroValue}
						setSelectedNullZeroValue={setSelectedNullZeroValue}
						selectedGraph={graphType}
						setSelectedTime={setSelectedTime}
						selectedTime={selectedTime}
						setYAxisUnit={setYAxisUnit}
						decimalPrecision={decimalPrecision}
						setDecimalPrecision={setDecimalPrecision}
						thresholds={thresholds}
						setThresholds={setThresholds}
						selectedWidget={selectedWidget}
						isFillSpans={isFillSpans}
						setIsFillSpans={setIsFillSpans}
						isLogScale={isLogScale}
						setIsLogScale={setIsLogScale}
						legendPosition={legendPosition}
						setLegendPosition={setLegendPosition}
						customLegendColors={customLegendColors}
						setCustomLegendColors={setCustomLegendColors}
						queryResponse={queryResponse}
						softMin={softMin}
						setSoftMin={setSoftMin}
						softMax={softMax}
						setSoftMax={setSoftMax}
						contextLinks={contextLinks}
						setContextLinks={setContextLinks}
						enableDrillDown={enableDrillDown}
						isNewDashboard={isNewDashboard}
					/>
				</RightContainerWrapper>
				<ResizablePanelGroup
					direction="horizontal"
					className="widget-resizable-panel-group"
					autoSaveId="panel-editor"
				>
					<ResizablePanel
						minSize={70}
						maxSize={80}
						defaultSize={80}
						className="resizable-panel-left-container"
					>
						<OverlayScrollbar>
							{selectedWidget && (
								<LeftContainer
									selectedDashboard={selectedDashboard}
									selectedGraph={graphType}
									selectedLogFields={selectedLogFields}
									setSelectedLogFields={setSelectedLogFields}
									selectedTracesFields={selectedTracesFields}
									setSelectedTracesFields={setSelectedTracesFields}
									selectedWidget={selectedWidget}
									selectedTime={selectedTime}
									requestData={requestData}
									setRequestData={setRequestData}
									isLoadingPanelData={isLoadingPanelData}
									setQueryResponse={setQueryResponse}
									enableDrillDown={enableDrillDown}
								/>
							)}
						</OverlayScrollbar>
					</ResizablePanel>
					<ResizableHandle withHandle className="widget-resizable-handle" />
					<ResizablePanel
						minSize={20}
						maxSize={30}
						defaultSize={20}
						className="resizable-panel-right-container"
					>
						<RightContainer
							setGraphHandler={setGraphHandler}
							title={title}
							setTitle={setTitle}
							description={description}
							setDescription={setDescription}
							stackedBarChart={stackedBarChart}
							setStackedBarChart={setStackedBarChart}
							lineInterpolation={lineInterpolation}
							setLineInterpolation={setLineInterpolation}
							fillMode={fillMode}
							setFillMode={setFillMode}
							lineStyle={lineStyle}
							setLineStyle={setLineStyle}
							showPoints={showPoints}
							setShowPoints={setShowPoints}
							opacity={opacity}
							yAxisUnit={yAxisUnit}
							columnUnits={columnUnits}
							setColumnUnits={setColumnUnits}
							bucketCount={bucketCount}
							bucketWidth={bucketWidth}
							combineHistogram={combineHistogram}
							setCombineHistogram={setCombineHistogram}
							setBucketWidth={setBucketWidth}
							setBucketCount={setBucketCount}
							setOpacity={setOpacity}
							selectedNullZeroValue={selectedNullZeroValue}
							setSelectedNullZeroValue={setSelectedNullZeroValue}
							selectedGraph={graphType}
							setSelectedTime={setSelectedTime}
							selectedTime={selectedTime}
							setYAxisUnit={setYAxisUnit}
							decimalPrecision={decimalPrecision}
							setDecimalPrecision={setDecimalPrecision}
							thresholds={thresholds}
							setThresholds={setThresholds}
							selectedWidget={selectedWidget}
							isFillSpans={isFillSpans}
							setIsFillSpans={setIsFillSpans}
							isLogScale={isLogScale}
							setIsLogScale={setIsLogScale}
							legendPosition={legendPosition}
							setLegendPosition={setLegendPosition}
							customLegendColors={customLegendColors}
							setCustomLegendColors={setCustomLegendColors}
							queryResponse={queryResponse}
							softMin={softMin}
							setSoftMin={setSoftMin}
							softMax={softMax}
							setSoftMax={setSoftMax}
							contextLinks={contextLinks}
							setContextLinks={setContextLinks}
							enableDrillDown={enableDrillDown}
							isNewDashboard={isNewDashboard}
						/>
					</ResizablePanel>
				</ResizablePanelGroup>
			</PanelContainer>
			<Modal
				title={
@@ -1,4 +1,4 @@
import { Col, Tag as AntDTag } from 'antd';
import { Tag as AntDTag } from 'antd';
import styled from 'styled-components';

export const Container = styled.div`
@@ -8,42 +8,6 @@ export const Container = styled.div`
  overflow-y: hidden;
`;

export const RightContainerWrapper = styled(Col)`
  &&& {
    max-width: 400px;
    width: 30%;
    overflow-y: auto;
  }
  &::-webkit-scrollbar {
    width: 0.3rem;
  }
  &::-webkit-scrollbar-thumb {
    background: rgb(136, 136, 136);
    border-radius: 0.625rem;
  }
  &::-webkit-scrollbar-track {
    background: transparent;
  }
`;

interface LeftContainerWrapperProps {
  isDarkMode: boolean;
}

export const LeftContainerWrapper = styled(Col)<LeftContainerWrapperProps>`
  &&& {
    width: 100%;
    overflow-y: auto;
    border-right: ${({ isDarkMode }): string =>
      isDarkMode
        ? '1px solid var(--bg-slate-300)'
        : '1px solid var(--bg-vanilla-300)'};
  }
  &::-webkit-scrollbar {
    width: 0rem;
  }
`;

export const ButtonContainer = styled.div`
  display: flex;
  gap: 8px;
@@ -3,14 +3,15 @@ import { generateColor } from 'lib/uPlotLib/utils/generateColor';
import { calculateWidthBasedOnStepInterval } from 'lib/uPlotV2/utils';
import uPlot, { Series } from 'uplot';

import { generateGradientFill } from '../utils/generateGradientFill';
import {
	BarAlignment,
	ConfigBuilder,
	DrawStyle,
	FillMode,
	LineInterpolation,
	LineStyle,
	SeriesProps,
	VisibilityMode,
} from './types';

/**
@@ -52,7 +53,7 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
	}: {
		resolvedLineColor: string;
	}): Partial<Series> {
		const { lineWidth, lineStyle, lineCap, fillColor } = this.props;
		const { lineWidth, lineStyle, lineCap, fillColor, fillMode } = this.props;
		const lineConfig: Partial<Series> = {
			stroke: resolvedLineColor,
			width: lineWidth ?? DEFAULT_LINE_WIDTH,
@@ -66,12 +67,26 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
			lineConfig.cap = lineCap;
		}

		if (fillColor) {
			lineConfig.fill = fillColor;
		} else if (this.props.drawStyle === DrawStyle.Bar) {
			lineConfig.fill = resolvedLineColor;
		/**
		 * Configure area fill based on draw style and fill mode:
		 * - bar charts always use a solid fill with the series color
		 * - histogram uses the same color with a fixed alpha suffix for translucency
		 * - for other series, an explicit fillMode controls whether we use a solid fill
		 *   or a vertical gradient from the series color to transparent
		 */
		const finalFillColor = fillColor ?? resolvedLineColor;

		if (this.props.drawStyle === DrawStyle.Bar) {
			lineConfig.fill = finalFillColor;
		} else if (this.props.drawStyle === DrawStyle.Histogram) {
			lineConfig.fill = `${resolvedLineColor}40`;
			lineConfig.fill = `${finalFillColor}40`;
		} else if (fillMode && fillMode !== FillMode.None) {
			if (fillMode === FillMode.Solid) {
				lineConfig.fill = finalFillColor;
			} else if (fillMode === FillMode.Gradient) {
				lineConfig.fill = (self: uPlot): CanvasGradient =>
					generateGradientFill(self, finalFillColor, 'rgba(0, 0, 0, 0)');
			}
		}

		return lineConfig;
@@ -159,12 +174,8 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
			pointsConfig.show = pointsBuilder;
		} else if (drawStyle === DrawStyle.Points) {
			pointsConfig.show = true;
		} else if (showPoints === VisibilityMode.Never) {
			pointsConfig.show = false;
		} else if (showPoints === VisibilityMode.Always) {
			pointsConfig.show = true;
		} else {
			pointsConfig.show = false; // default to hidden
			pointsConfig.show = !!showPoints;
		}

		return pointsConfig;
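Review note: the fill branches above resolve in a fixed precedence, and the `${finalFillColor}40` suffix assumes the resolved color is an opaque 6-digit hex string (appending two hex digits yields an 8-digit hex color at roughly 25% alpha). A minimal standalone sketch of the same decision, under that assumption — `resolveFill` and its signature are hypothetical illustration, not part of this diff:

import uPlot, { Series } from 'uplot';

import { generateGradientFill } from '../utils/generateGradientFill';
import { DrawStyle, FillMode } from './types';

// Hypothetical helper mirroring the branch order in getLineConfig above.
function resolveFill(
	drawStyle: DrawStyle,
	fillMode: FillMode | undefined,
	color: string, // assumed opaque 6-digit hex, e.g. '#3F5ECC'
): Series['fill'] {
	if (drawStyle === DrawStyle.Bar) return color; // bars: always solid
	if (drawStyle === DrawStyle.Histogram) return `${color}40`; // ~25% alpha
	if (fillMode === FillMode.Solid) return color;
	if (fillMode === FillMode.Gradient) {
		return (self: uPlot): CanvasGradient =>
			generateGradientFill(self, color, 'rgba(0, 0, 0, 0)');
	}
	return undefined; // FillMode.None or unset: no area fill
}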
@@ -2,12 +2,7 @@ import { themeColors } from 'constants/theme';
import uPlot from 'uplot';

import type { SeriesProps } from '../types';
import {
	DrawStyle,
	LineInterpolation,
	LineStyle,
	VisibilityMode,
} from '../types';
import { DrawStyle, LineInterpolation, LineStyle } from '../types';
import { POINT_SIZE_FACTOR, UPlotSeriesBuilder } from '../UPlotSeriesBuilder';

const createBaseProps = (
@@ -168,17 +163,17 @@ describe('UPlotSeriesBuilder', () => {
		expect(config.points?.show).toBe(pointsBuilder);
	});

	it('respects VisibilityMode for point visibility when no custom pointsBuilder is given', () => {
	it('respects showPoints for point visibility when no custom pointsBuilder is given', () => {
		const neverPointsBuilder = new UPlotSeriesBuilder(
			createBaseProps({
				drawStyle: DrawStyle.Line,
				showPoints: VisibilityMode.Never,
				showPoints: false,
			}),
		);
		const alwaysPointsBuilder = new UPlotSeriesBuilder(
			createBaseProps({
				drawStyle: DrawStyle.Line,
				showPoints: VisibilityMode.Always,
				showPoints: true,
			}),
		);
@@ -122,12 +122,6 @@ export enum LineInterpolation {
|
||||
StepBefore = 'stepBefore',
|
||||
}
|
||||
|
||||
export enum VisibilityMode {
|
||||
Always = 'always',
|
||||
Auto = 'auto',
|
||||
Never = 'never',
|
||||
}
|
||||
|
||||
/**
|
||||
* Props for configuring lines
|
||||
*/
|
||||
@@ -163,7 +157,13 @@ export interface BarConfig {
|
||||
export interface PointsConfig {
|
||||
pointColor?: string;
|
||||
pointSize?: number;
|
||||
showPoints?: VisibilityMode;
|
||||
showPoints?: boolean;
|
||||
}
|
||||
|
||||
export enum FillMode {
|
||||
Solid = 'solid',
|
||||
Gradient = 'gradient',
|
||||
None = 'none',
|
||||
}
|
||||
|
||||
export interface SeriesProps extends LineConfig, PointsConfig, BarConfig {
|
||||
@@ -177,6 +177,7 @@ export interface SeriesProps extends LineConfig, PointsConfig, BarConfig {
|
||||
show?: boolean;
|
||||
spanGaps?: boolean;
|
||||
fillColor?: string;
|
||||
fillMode?: FillMode;
|
||||
isDarkMode?: boolean;
|
||||
stepInterval?: number;
|
||||
}
|
||||
|
||||
18
frontend/src/lib/uPlotV2/utils/generateGradientFill.ts
Normal file
@@ -0,0 +1,18 @@
+import uPlot from 'uplot';
+
+export function generateGradientFill(
+	uPlotInstance: uPlot,
+	startColor: string,
+	endColor: string,
+): CanvasGradient {
+	const g = uPlotInstance.ctx.createLinearGradient(
+		0,
+		0,
+		0,
+		uPlotInstance.bbox.height,
+	);
+	g.addColorStop(0, `${startColor}70`);
+	g.addColorStop(0.6, `${startColor}40`);
+	g.addColorStop(1, endColor);
+	return g;
+}
@@ -5,6 +5,11 @@ import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
 import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
 import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems';
 import { QueryTableProps } from 'container/QueryTable/QueryTable.intefaces';
+import {
+	FillMode,
+	LineInterpolation,
+	LineStyle,
+} from 'lib/uPlotV2/config/types';
 import { Query } from 'types/api/queryBuilder/queryBuilderData';

 import { IField } from '../logs/fields';
@@ -132,6 +137,10 @@ export interface IBaseWidget {
 	legendPosition?: LegendPosition;
 	customLegendColors?: Record<string, string>;
 	contextLinks?: ContextLinksData;
+	lineInterpolation?: LineInterpolation;
+	showPoints?: boolean;
+	lineStyle?: LineStyle;
+	fillMode?: FillMode;
 }
 export interface Widgets extends IBaseWidget {
 	query: Query;
@@ -5646,7 +5646,7 @@
     tailwind-merge "^2.5.2"
     tailwindcss-animate "^1.0.7"

-"@signozhq/toggle-group@^0.0.1":
+"@signozhq/toggle-group@0.0.1":
   version "0.0.1"
   resolved "https://registry.yarnpkg.com/@signozhq/toggle-group/-/toggle-group-0.0.1.tgz#c82ff1da34e77b24da53c2d595ad6b4a0d1b1de4"
   integrity sha512-871bQayL5MaqsuNOFHKexidu9W2Hlg1y4xmH8C5mGmlfZ4bd0ovJ9OweQrM6Puys3jeMwi69xmJuesYCfKQc1g==
@@ -18,13 +18,12 @@ import (
 	"github.com/SigNoz/signoz/pkg/modules/organization"
 	"github.com/SigNoz/signoz/pkg/modules/preference"
 	"github.com/SigNoz/signoz/pkg/modules/promote"
-	"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
 	"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
 	"github.com/SigNoz/signoz/pkg/modules/session"
 	"github.com/SigNoz/signoz/pkg/modules/user"
 	"github.com/SigNoz/signoz/pkg/querier"
 	"github.com/SigNoz/signoz/pkg/types"
-	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
+	"github.com/SigNoz/signoz/pkg/types/authtypes"
 	"github.com/SigNoz/signoz/pkg/zeus"
 	"github.com/gorilla/mux"
 )
@@ -48,7 +47,6 @@ type provider struct {
 	gatewayHandler        gateway.Handler
 	fieldsHandler         fields.Handler
 	authzHandler          authz.Handler
-	rawDataExportHandler  rawdataexport.Handler
 	zeusHandler           zeus.Handler
 	querierHandler        querier.Handler
 	serviceAccountHandler serviceaccount.Handler
@@ -71,7 +69,6 @@ func NewFactory(
 	gatewayHandler gateway.Handler,
 	fieldsHandler fields.Handler,
 	authzHandler authz.Handler,
-	rawDataExportHandler rawdataexport.Handler,
 	zeusHandler zeus.Handler,
 	querierHandler querier.Handler,
 	serviceAccountHandler serviceaccount.Handler,
@@ -97,7 +94,6 @@ func NewFactory(
 			gatewayHandler,
 			fieldsHandler,
 			authzHandler,
-			rawDataExportHandler,
 			zeusHandler,
 			querierHandler,
 			serviceAccountHandler,
@@ -125,7 +121,6 @@ func newProvider(
 	gatewayHandler gateway.Handler,
 	fieldsHandler fields.Handler,
 	authzHandler authz.Handler,
-	rawDataExportHandler rawdataexport.Handler,
 	zeusHandler zeus.Handler,
 	querierHandler querier.Handler,
 	serviceAccountHandler serviceaccount.Handler,
@@ -151,7 +146,6 @@ func newProvider(
 		gatewayHandler:        gatewayHandler,
 		fieldsHandler:         fieldsHandler,
 		authzHandler:          authzHandler,
-		rawDataExportHandler:  rawDataExportHandler,
 		zeusHandler:           zeusHandler,
 		querierHandler:        querierHandler,
 		serviceAccountHandler: serviceAccountHandler,
@@ -227,10 +221,6 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
 		return err
 	}

-	if err := provider.addRawDataExportRoutes(router); err != nil {
-		return err
-	}
-
 	if err := provider.addZeusRoutes(router); err != nil {
 		return err
 	}
@@ -248,13 +238,13 @@ func (provider *provider) AddToRouter(router *mux.Router) error {

 func newSecuritySchemes(role types.Role) []handler.OpenAPISecurityScheme {
 	return []handler.OpenAPISecurityScheme{
-		{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{role.String()}},
-		{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{role.String()}},
+		{Name: authtypes.IdentNProviderAPIkey.StringValue(), Scopes: []string{role.String()}},
+		{Name: authtypes.IdentNProviderTokenizer.StringValue(), Scopes: []string{role.String()}},
 	}
 }

 func newAnonymousSecuritySchemes(scopes []string) []handler.OpenAPISecurityScheme {
 	return []handler.OpenAPISecurityScheme{
-		{Name: ctxtypes.AuthTypeAnonymous.StringValue(), Scopes: scopes},
+		{Name: authtypes.IdentNProviderAnonymous.StringValue(), Scopes: scopes},
 	}
 }
@@ -1,33 +0,0 @@
-package signozapiserver
-
-import (
-	"net/http"
-
-	"github.com/SigNoz/signoz/pkg/http/handler"
-	"github.com/SigNoz/signoz/pkg/types"
-	"github.com/SigNoz/signoz/pkg/types/exporttypes"
-	v5 "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
-	"github.com/gorilla/mux"
-)
-
-func (provider *provider) addRawDataExportRoutes(router *mux.Router) error {
-
-	if err := router.Handle("/api/v1/export_raw_data", handler.New(provider.authZ.ViewAccess(provider.rawDataExportHandler.ExportRawData), handler.OpenAPIDef{
-		ID:                  "HandleExportRawDataPOST",
-		Tags:                []string{"logs", "traces"},
-		Summary:             "Export raw data",
-		Description:         "This endpoints allows complex query exporting raw data for traces and logs",
-		Request:             new(v5.QueryRangeRequest),
-		RequestQuery:        new(exporttypes.ExportRawDataFormatQueryParam),
-		RequestContentType:  "application/json",
-		Response:            nil,
-		ResponseContentType: "application/json",
-		SuccessStatusCode:   http.StatusOK,
-		ErrorStatusCodes:    []int{http.StatusBadRequest},
-		SecuritySchemes:     newSecuritySchemes(types.RoleViewer),
-	})).Methods(http.MethodPost).GetError(); err != nil {
-		return err
-	}
-
-	return nil
-}
@@ -5,7 +5,6 @@ import (

 	"github.com/SigNoz/signoz/pkg/http/handler"
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
-	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
 	"github.com/gorilla/mux"
 )

@@ -73,7 +72,7 @@ func (provider *provider) addSessionRoutes(router *mux.Router) error {
 		SuccessStatusCode: http.StatusNoContent,
 		ErrorStatusCodes:  []int{http.StatusBadRequest},
 		Deprecated:        false,
-		SecuritySchemes:   []handler.OpenAPISecurityScheme{{Name: ctxtypes.AuthTypeTokenizer.StringValue()}},
+		SecuritySchemes:   []handler.OpenAPISecurityScheme{{Name: authtypes.IdentNProviderTokenizer.StringValue()}},
 	})).Methods(http.MethodDelete).GetError(); err != nil {
 		return err
 	}
@@ -5,7 +5,7 @@ import (

 	"github.com/SigNoz/signoz/pkg/http/handler"
 	"github.com/SigNoz/signoz/pkg/types"
-	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
+	"github.com/SigNoz/signoz/pkg/types/authtypes"
 	"github.com/gorilla/mux"
 )

@@ -208,7 +208,7 @@ func (provider *provider) addUserRoutes(router *mux.Router) error {
 		SuccessStatusCode: http.StatusOK,
 		ErrorStatusCodes:  []int{},
 		Deprecated:        false,
-		SecuritySchemes:   []handler.OpenAPISecurityScheme{{Name: ctxtypes.AuthTypeTokenizer.StringValue()}},
+		SecuritySchemes:   []handler.OpenAPISecurityScheme{{Name: authtypes.IdentNProviderTokenizer.StringValue()}},
 	})).Methods(http.MethodGet).GetError(); err != nil {
 		return err
 	}
@@ -30,5 +30,5 @@ func (a *AuthN) Authenticate(ctx context.Context, email string, password string,
 		return nil, errors.New(errors.TypeUnauthenticated, types.ErrCodeIncorrectPassword, "invalid email or password")
 	}

-	return authtypes.NewIdentity(user.ID, orgID, user.Email, user.Role), nil
+	return authtypes.NewIdentity(user.ID, orgID, user.Email, user.Role, authtypes.IdentNProviderTokenizer), nil
 }
@@ -1,143 +0,0 @@
-package middleware
-
-import (
-	"context"
-	"log/slog"
-	"net/http"
-	"time"
-
-	"github.com/SigNoz/signoz/pkg/sharder"
-	"github.com/SigNoz/signoz/pkg/sqlstore"
-	"github.com/SigNoz/signoz/pkg/types"
-	"github.com/SigNoz/signoz/pkg/types/authtypes"
-	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
-	"github.com/SigNoz/signoz/pkg/valuer"
-	"golang.org/x/sync/singleflight"
-)
-
-const (
-	apiKeyCrossOrgMessage string = "::API-KEY-CROSS-ORG::"
-)
-
-type APIKey struct {
-	store   sqlstore.SQLStore
-	uuid    *authtypes.UUID
-	headers []string
-	logger  *slog.Logger
-	sharder sharder.Sharder
-	sfGroup *singleflight.Group
-}
-
-func NewAPIKey(store sqlstore.SQLStore, headers []string, logger *slog.Logger, sharder sharder.Sharder) *APIKey {
-	return &APIKey{
-		store:   store,
-		uuid:    authtypes.NewUUID(),
-		headers: headers,
-		logger:  logger,
-		sharder: sharder,
-		sfGroup: &singleflight.Group{},
-	}
-}
-
-func (a *APIKey) Wrap(next http.Handler) http.Handler {
-	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
-		var values []string
-		var apiKeyToken string
-		var apiKey types.StorableAPIKey
-
-		for _, header := range a.headers {
-			values = append(values, r.Header.Get(header))
-		}
-
-		ctx, err := a.uuid.ContextFromRequest(r.Context(), values...)
-		if err != nil {
-			next.ServeHTTP(w, r)
-			return
-		}
-
-		apiKeyToken, ok := authtypes.UUIDFromContext(ctx)
-		if !ok {
-			next.ServeHTTP(w, r)
-			return
-		}
-
-		err = a.
-			store.
-			BunDB().
-			NewSelect().
-			Model(&apiKey).
-			Where("token = ?", apiKeyToken).
-			Scan(r.Context())
-		if err != nil {
-			next.ServeHTTP(w, r)
-			return
-		}
-
-		// allow the APIKey if expires_at is not set
-		if apiKey.ExpiresAt.Before(time.Now()) && !apiKey.ExpiresAt.Equal(types.NEVER_EXPIRES) {
-			next.ServeHTTP(w, r)
-			return
-		}
-
-		// get user from db
-		user := types.User{}
-		err = a.store.BunDB().NewSelect().Model(&user).Where("id = ?", apiKey.UserID).Scan(r.Context())
-		if err != nil {
-			next.ServeHTTP(w, r)
-			return
-		}
-
-		jwt := authtypes.Claims{
-			UserID: user.ID.String(),
-			Role:   apiKey.Role,
-			Email:  user.Email.String(),
-			OrgID:  user.OrgID.String(),
-		}
-
-		ctx = authtypes.NewContextWithClaims(ctx, jwt)
-
-		claims, err := authtypes.ClaimsFromContext(ctx)
-		if err != nil {
-			next.ServeHTTP(w, r)
-			return
-		}
-
-		if err := a.sharder.IsMyOwnedKey(r.Context(), types.NewOrganizationKey(valuer.MustNewUUID(claims.OrgID))); err != nil {
-			a.logger.ErrorContext(r.Context(), apiKeyCrossOrgMessage, "claims", claims, "error", err)
-			next.ServeHTTP(w, r)
-			return
-		}
-
-		ctx = ctxtypes.SetAuthType(ctx, ctxtypes.AuthTypeAPIKey)
-
-		comment := ctxtypes.CommentFromContext(ctx)
-		comment.Set("auth_type", ctxtypes.AuthTypeAPIKey.StringValue())
-		comment.Set("user_id", claims.UserID)
-		comment.Set("org_id", claims.OrgID)
-
-		r = r.WithContext(ctxtypes.NewContextWithComment(ctx, comment))
-
-		next.ServeHTTP(w, r)
-
-		lastUsedCtx := context.WithoutCancel(r.Context())
-		_, _, _ = a.sfGroup.Do(apiKey.ID.StringValue(), func() (any, error) {
-			apiKey.LastUsed = time.Now()
-			_, err = a.
-				store.
-				BunDB().
-				NewUpdate().
-				Model(&apiKey).
-				Column("last_used").
-				Where("token = ?", apiKeyToken).
-				Where("revoked = false").
-				Exec(lastUsedCtx)
-			if err != nil {
-				a.logger.ErrorContext(lastUsedCtx, "failed to update last used of api key", "error", err)
-			}
-
-			return true, nil
-		})
-	})
-}
@@ -1,150 +0,0 @@
-package middleware
-
-import (
-	"context"
-	"log/slog"
-	"net/http"
-	"strings"
-	"time"
-
-	"github.com/SigNoz/signoz/pkg/errors"
-	"github.com/SigNoz/signoz/pkg/sharder"
-	"github.com/SigNoz/signoz/pkg/tokenizer"
-	"github.com/SigNoz/signoz/pkg/types"
-	"github.com/SigNoz/signoz/pkg/types/authtypes"
-	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
-	"github.com/SigNoz/signoz/pkg/valuer"
-	"golang.org/x/sync/singleflight"
-)
-
-const (
-	authCrossOrgMessage string = "::AUTH-CROSS-ORG::"
-)
-
-type AuthN struct {
-	tokenizer tokenizer.Tokenizer
-	headers   []string
-	sharder   sharder.Sharder
-	logger    *slog.Logger
-	sfGroup   *singleflight.Group
-}
-
-func NewAuthN(headers []string, sharder sharder.Sharder, tokenizer tokenizer.Tokenizer, logger *slog.Logger) *AuthN {
-	return &AuthN{
-		headers:   headers,
-		sharder:   sharder,
-		tokenizer: tokenizer,
-		logger:    logger,
-		sfGroup:   &singleflight.Group{},
-	}
-}
-
-func (a *AuthN) Wrap(next http.Handler) http.Handler {
-	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
-		var values []string
-		for _, header := range a.headers {
-			values = append(values, r.Header.Get(header))
-		}
-
-		ctx, err := a.contextFromRequest(r.Context(), values...)
-		if err != nil {
-			r = r.WithContext(ctx)
-			next.ServeHTTP(w, r)
-			return
-		}
-
-		r = r.WithContext(ctx)
-
-		claims, err := authtypes.ClaimsFromContext(ctx)
-		if err != nil {
-			next.ServeHTTP(w, r)
-			return
-		}
-
-		if err := a.sharder.IsMyOwnedKey(r.Context(), types.NewOrganizationKey(valuer.MustNewUUID(claims.OrgID))); err != nil {
-			a.logger.ErrorContext(r.Context(), authCrossOrgMessage, "claims", claims, "error", err)
-			next.ServeHTTP(w, r)
-			return
-		}
-
-		ctx = ctxtypes.SetAuthType(ctx, ctxtypes.AuthTypeTokenizer)
-
-		comment := ctxtypes.CommentFromContext(ctx)
-		comment.Set("auth_type", ctxtypes.AuthTypeTokenizer.StringValue())
-		comment.Set("tokenizer_provider", a.tokenizer.Config().Provider)
-		comment.Set("user_id", claims.UserID)
-		comment.Set("org_id", claims.OrgID)
-
-		r = r.WithContext(ctxtypes.NewContextWithComment(ctx, comment))
-
-		next.ServeHTTP(w, r)
-
-		accessToken, err := authtypes.AccessTokenFromContext(r.Context())
-		if err != nil {
-			next.ServeHTTP(w, r)
-			return
-		}
-
-		lastObservedAtCtx := context.WithoutCancel(r.Context())
-		_, _, _ = a.sfGroup.Do(accessToken, func() (any, error) {
-			if err := a.tokenizer.SetLastObservedAt(lastObservedAtCtx, accessToken, time.Now()); err != nil {
-				a.logger.ErrorContext(lastObservedAtCtx, "failed to set last observed at", "error", err)
-				return false, err
-			}
-
-			return true, nil
-		})
-	})
-}
-
-func (a *AuthN) contextFromRequest(ctx context.Context, values ...string) (context.Context, error) {
-	ctx, err := a.contextFromAccessToken(ctx, values...)
-	if err != nil {
-		return ctx, err
-	}
-
-	accessToken, err := authtypes.AccessTokenFromContext(ctx)
-	if err != nil {
-		return ctx, err
-	}
-
-	authenticatedUser, err := a.tokenizer.GetIdentity(ctx, accessToken)
-	if err != nil {
-		return ctx, err
-	}
-
-	return authtypes.NewContextWithClaims(ctx, authenticatedUser.ToClaims()), nil
-}
-
-func (a *AuthN) contextFromAccessToken(ctx context.Context, values ...string) (context.Context, error) {
-	var value string
-	for _, v := range values {
-		if v != "" {
-			value = v
-			break
-		}
-	}
-
-	if value == "" {
-		return ctx, errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "missing authorization header")
-	}
-
-	// parse from
-	bearerToken, ok := parseBearerAuth(value)
-	if !ok {
-		// this will take care that if the value is not of type bearer token, directly use it
-		bearerToken = value
-	}
-
-	return authtypes.NewContextWithAccessToken(ctx, bearerToken), nil
-}
-
-func parseBearerAuth(auth string) (string, bool) {
-	const prefix = "Bearer "
-	// Case insensitive prefix match
-	if len(auth) < len(prefix) || !strings.EqualFold(auth[:len(prefix)], prefix) {
-		return "", false
-	}
-
-	return auth[len(prefix):], true
-}
@@ -44,7 +44,7 @@ func (middleware *AuthZ) ViewAccess(next http.HandlerFunc) http.HandlerFunc {

 		commentCtx := ctxtypes.CommentFromContext(ctx)
 		authtype, ok := commentCtx.Map()["auth_type"]
-		if ok && authtype == ctxtypes.AuthTypeAPIKey.StringValue() {
+		if ok && (authtype == authtypes.IdentNProviderAPIkey.StringValue()) {
 			if err := claims.IsViewer(); err != nil {
 				middleware.logger.WarnContext(ctx, authzDeniedMessage, "claims", claims)
 				render.Error(rw, err)
@@ -96,7 +96,7 @@ func (middleware *AuthZ) EditAccess(next http.HandlerFunc) http.HandlerFunc {

 		commentCtx := ctxtypes.CommentFromContext(ctx)
 		authtype, ok := commentCtx.Map()["auth_type"]
-		if ok && authtype == ctxtypes.AuthTypeAPIKey.StringValue() {
+		if ok && (authtype == authtypes.IdentNProviderAPIkey.StringValue()) {
 			if err := claims.IsEditor(); err != nil {
 				middleware.logger.WarnContext(ctx, authzDeniedMessage, "claims", claims)
 				render.Error(rw, err)
@@ -147,7 +147,7 @@ func (middleware *AuthZ) AdminAccess(next http.HandlerFunc) http.HandlerFunc {

 		commentCtx := ctxtypes.CommentFromContext(ctx)
 		authtype, ok := commentCtx.Map()["auth_type"]
-		if ok && authtype == ctxtypes.AuthTypeAPIKey.StringValue() {
+		if ok && (authtype == authtypes.IdentNProviderAPIkey.StringValue()) {
 			if err := claims.IsAdmin(); err != nil {
 				middleware.logger.WarnContext(ctx, authzDeniedMessage, "claims", claims)
 				render.Error(rw, err)
75
pkg/http/middleware/identn.go
Normal file
@@ -0,0 +1,75 @@
+package middleware
+
+import (
+	"context"
+	"log/slog"
+	"net/http"
+
+	"github.com/SigNoz/signoz/pkg/identn"
+	"github.com/SigNoz/signoz/pkg/sharder"
+	"github.com/SigNoz/signoz/pkg/types"
+	"github.com/SigNoz/signoz/pkg/types/authtypes"
+	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
+	"github.com/SigNoz/signoz/pkg/valuer"
+)
+
+const (
+	identityCrossOrgMessage string = "::IDENTITY-CROSS-ORG::"
+)
+
+type IdentN struct {
+	resolver identn.IdentNResolver
+	sharder  sharder.Sharder
+	logger   *slog.Logger
+}
+
+func NewIdentN(resolver identn.IdentNResolver, sharder sharder.Sharder, logger *slog.Logger) *IdentN {
+	return &IdentN{
+		resolver: resolver,
+		sharder:  sharder,
+		logger:   logger,
+	}
+}
+
+func (m *IdentN) Wrap(next http.Handler) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		idn := m.resolver.GetIdentN(r)
+		if idn == nil {
+			next.ServeHTTP(w, r)
+			return
+		}
+
+		if pre, ok := idn.(identn.IdentNWithPreHook); ok {
+			r = pre.Pre(r)
+		}
+
+		identity, err := idn.GetIdentity(r)
+		if err != nil {
+			next.ServeHTTP(w, r)
+			return
+		}
+
+		ctx := r.Context()
+		claims := identity.ToClaims()
+		if err := m.sharder.IsMyOwnedKey(ctx, types.NewOrganizationKey(valuer.MustNewUUID(claims.OrgID))); err != nil {
+			m.logger.ErrorContext(ctx, identityCrossOrgMessage, "claims", claims, "error", err)
+			next.ServeHTTP(w, r)
+			return
+		}
+
+		ctx = authtypes.NewContextWithClaims(ctx, claims)
+
+		comment := ctxtypes.CommentFromContext(ctx)
+		comment.Set("identn_provider", claims.IdentNProvider)
+		comment.Set("user_id", claims.UserID)
+		comment.Set("org_id", claims.OrgID)
+		ctx = ctxtypes.NewContextWithComment(ctx, comment)
+
+		r = r.WithContext(ctx)
+		next.ServeHTTP(w, r)

+		if hook, ok := idn.(identn.IdentNWithPostHook); ok {
+			hook.Post(context.WithoutCancel(r.Context()), r, claims)
+		}
+	})
+}
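For orientation, a minimal wiring sketch for the middleware above. This is not part of the diff; the resolver, sharder, and logger values are assumed to come from the application's existing setup. gorilla/mux's Use accepts func(http.Handler) http.Handler, which Wrap satisfies as a method value.

	idn := middleware.NewIdentN(resolver, sharder, logger)
	router := mux.NewRouter()
	// Every request now passes through identity resolution before its handler.
	router.Use(idn.Wrap)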
131
pkg/identn/apikeyidentn/identn.go
Normal file
@@ -0,0 +1,131 @@
+package apikeyidentn
+
+import (
+	"context"
+	"net/http"
+	"time"
+
+	"github.com/SigNoz/signoz/pkg/errors"
+	"github.com/SigNoz/signoz/pkg/factory"
+	"github.com/SigNoz/signoz/pkg/identn"
+	"github.com/SigNoz/signoz/pkg/sqlstore"
+	"github.com/SigNoz/signoz/pkg/types"
+	"github.com/SigNoz/signoz/pkg/types/authtypes"
+	"golang.org/x/sync/singleflight"
+)
+
+// todo: will move this in types layer with service account integration
+type apiKeyTokenKey struct{}
+
+type resolver struct {
+	store    sqlstore.SQLStore
+	headers  []string
+	settings factory.ScopedProviderSettings
+	sfGroup  *singleflight.Group
+}
+
+func New(providerSettings factory.ProviderSettings, store sqlstore.SQLStore, headers []string) identn.IdentN {
+	return &resolver{
+		store:    store,
+		headers:  headers,
+		settings: factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/identn/apikeyidentn"),
+		sfGroup:  &singleflight.Group{},
+	}
+}
+
+func (r *resolver) Name() authtypes.IdentNProvider {
+	return authtypes.IdentNProviderAPIkey
+}
+
+func (r *resolver) Test(req *http.Request) bool {
+	for _, header := range r.headers {
+		if req.Header.Get(header) != "" {
+			return true
+		}
+	}
+	return false
+}
+
+func (r *resolver) Pre(req *http.Request) *http.Request {
+	token := r.extractToken(req)
+	if token == "" {
+		return req
+	}
+
+	ctx := context.WithValue(req.Context(), apiKeyTokenKey{}, token)
+	return req.WithContext(ctx)
+}
+
+func (r *resolver) GetIdentity(req *http.Request) (*authtypes.Identity, error) {
+	ctx := req.Context()
+
+	apiKeyToken, ok := ctx.Value(apiKeyTokenKey{}).(string)
+	if !ok || apiKeyToken == "" {
+		return nil, errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "missing api key")
+	}
+
+	var apiKey types.StorableAPIKey
+	err := r.store.
+		BunDB().
+		NewSelect().
+		Model(&apiKey).
+		Where("token = ?", apiKeyToken).
+		Scan(ctx)
+	if err != nil {
+		return nil, err
+	}
+
+	if apiKey.ExpiresAt.Before(time.Now()) && !apiKey.ExpiresAt.Equal(types.NEVER_EXPIRES) {
+		return nil, errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "api key has expired")
+	}
+
+	var user types.User
+	err = r.store.
+		BunDB().
+		NewSelect().
+		Model(&user).
+		Where("id = ?", apiKey.UserID).
+		Scan(ctx)
+	if err != nil {
+		return nil, err
+	}
+
+	identity := authtypes.Identity{
+		UserID: user.ID,
+		Role:   apiKey.Role,
+		Email:  user.Email,
+		OrgID:  user.OrgID,
+	}
+	return &identity, nil
+}
+
+func (r *resolver) Post(ctx context.Context, _ *http.Request, _ authtypes.Claims) {
+	apiKeyToken, ok := ctx.Value(apiKeyTokenKey{}).(string)
+	if !ok || apiKeyToken == "" {
+		return
+	}
+
+	_, _, _ = r.sfGroup.Do(apiKeyToken, func() (any, error) {
+		_, err := r.store.
+			BunDB().
+			NewUpdate().
+			Model(new(types.StorableAPIKey)).
+			Set("last_used = ?", time.Now()).
+			Where("token = ?", apiKeyToken).
+			Where("revoked = false").
+			Exec(ctx)
+		if err != nil {
+			r.settings.Logger().ErrorContext(ctx, "failed to update last used of api key", "error", err)
+		}
+		return true, nil
+	})
+}
+
+func (r *resolver) extractToken(req *http.Request) string {
+	for _, header := range r.headers {
+		if v := req.Header.Get(header); v != "" {
+			return v
+		}
+	}
+	return ""
+}
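The Post hook above funnels the last_used write through singleflight, so a burst of concurrent requests carrying the same API key collapses into a single UPDATE rather than one per request. A small self-contained illustration of that collapsing behavior (the key string is arbitrary):

	package main

	import (
		"fmt"
		"sync"
		"sync/atomic"

		"golang.org/x/sync/singleflight"
	)

	func main() {
		var group singleflight.Group
		var executions int64

		var wg sync.WaitGroup
		for i := 0; i < 10; i++ {
			wg.Add(1)
			go func() {
				defer wg.Done()
				// Calls that arrive while one execution is in flight for the
				// same key share its result instead of running again.
				_, _, _ = group.Do("same-api-key-token", func() (any, error) {
					atomic.AddInt64(&executions, 1)
					return true, nil
				})
			}()
		}
		wg.Wait()
		fmt.Println("executions:", atomic.LoadInt64(&executions)) // usually 1
	}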
43
pkg/identn/identn.go
Normal file
@@ -0,0 +1,43 @@
+package identn
+
+import (
+	"context"
+	"net/http"
+
+	"github.com/SigNoz/signoz/pkg/types/authtypes"
+)
+
+type IdentNResolver interface {
+	// GetIdentN returns the first IdentN whose Test() returns true for the request.
+	// Returns nil if no resolver matched.
+	GetIdentN(r *http.Request) IdentN
+}
+
+type IdentN interface {
+	// Test checks if this identN can handle the request.
+	// This should be a cheap check (e.g., header presence) with no I/O.
+	Test(r *http.Request) bool
+
+	// GetIdentity returns the resolved identity.
+	// Only called when Test() returns true.
+	GetIdentity(r *http.Request) (*authtypes.Identity, error)
+
+	Name() authtypes.IdentNProvider
+}
+
+// IdentNWithPreHook is optionally implemented by resolvers that need to
+// enrich the request before authentication (e.g., storing the access token
+// in context so downstream handlers can use it even on auth failure).
+type IdentNWithPreHook interface {
+	IdentN
+
+	Pre(r *http.Request) *http.Request
+}
+
+// IdentNWithPostHook is optionally implemented by resolvers that need
+// post-response side-effects (e.g., updating last_observed_at).
+type IdentNWithPostHook interface {
+	IdentN
+
+	Post(ctx context.Context, r *http.Request, claims authtypes.Claims)
+}
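Read together: Test gates cheaply, GetIdentity does the real work, and the two hook interfaces are discovered with plain type assertions. A condensed, hedged restatement of the dispatch that pkg/http/middleware/identn.go (earlier in this diff) performs; the function name is illustrative only:

	package identnsketch

	import (
		"context"
		"net/http"

		"github.com/SigNoz/signoz/pkg/identn"
		"github.com/SigNoz/signoz/pkg/types/authtypes"
	)

	// runIdentN mirrors the middleware's optional-hook dispatch in one place.
	func runIdentN(idn identn.IdentN, r *http.Request) (*authtypes.Identity, error) {
		if pre, ok := idn.(identn.IdentNWithPreHook); ok {
			r = pre.Pre(r) // e.g. stash the raw token in the request context
		}

		identity, err := idn.GetIdentity(r)
		if err != nil {
			return nil, err
		}

		if post, ok := idn.(identn.IdentNWithPostHook); ok {
			// Detached context so the side-effect survives request cancellation.
			defer post.Post(context.WithoutCancel(r.Context()), r, identity.ToClaims())
		}
		return identity, nil
	}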
31
pkg/identn/resolver.go
Normal file
@@ -0,0 +1,31 @@
+package identn
+
+import (
+	"net/http"
+
+	"github.com/SigNoz/signoz/pkg/factory"
+)
+
+type identNResolver struct {
+	identNs  []IdentN
+	settings factory.ScopedProviderSettings
+}
+
+func NewIdentNResolver(providerSettings factory.ProviderSettings, identNs ...IdentN) IdentNResolver {
+	return &identNResolver{
+		identNs:  identNs,
+		settings: factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/identn"),
+	}
+}
+
+// GetIdentN returns the first IdentN whose Test() returns true.
+// Returns nil if no resolver matched.
+func (c *identNResolver) GetIdentN(r *http.Request) IdentN {
+	for _, idn := range c.identNs {
+		if idn.Test(r) {
+			c.settings.Logger().DebugContext(r.Context(), "identN matched", "provider", idn.Name())
+			return idn
+		}
+	}
+	return nil
+}
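Because GetIdentN returns the first match, registration order is priority order. A hedged wiring sketch; the header names here are illustrative assumptions, not values taken from this diff:

	resolver := identn.NewIdentNResolver(
		providerSettings,
		// Checked first: requests carrying the API-key header.
		apikeyidentn.New(providerSettings, store, []string{"SIGNOZ-API-KEY"}),
		// Checked next: requests carrying a bearer/access-token header.
		tokenizeridentn.New(providerSettings, tokenizer, []string{"Authorization"}),
	)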
103
pkg/identn/tokenizeridentn/identn.go
Normal file
@@ -0,0 +1,103 @@
+package tokenizeridentn
+
+import (
+	"context"
+	"net/http"
+	"strings"
+	"time"
+
+	"github.com/SigNoz/signoz/pkg/factory"
+	"github.com/SigNoz/signoz/pkg/identn"
+	"github.com/SigNoz/signoz/pkg/tokenizer"
+	"github.com/SigNoz/signoz/pkg/types/authtypes"
+	"golang.org/x/sync/singleflight"
+)
+
+type resolver struct {
+	tokenizer tokenizer.Tokenizer
+	headers   []string
+	settings  factory.ScopedProviderSettings
+	sfGroup   *singleflight.Group
+}
+
+func New(providerSettings factory.ProviderSettings, tokenizer tokenizer.Tokenizer, headers []string) identn.IdentN {
+	return &resolver{
+		tokenizer: tokenizer,
+		headers:   headers,
+		settings:  factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/identn/tokenizeridentn"),
+		sfGroup:   &singleflight.Group{},
+	}
+}
+
+func (r *resolver) Name() authtypes.IdentNProvider {
+	return authtypes.IdentNProviderTokenizer
+}
+
+func (r *resolver) Test(req *http.Request) bool {
+	for _, header := range r.headers {
+		if req.Header.Get(header) != "" {
+			return true
+		}
+	}
+	return false
+}
+
+func (r *resolver) Pre(req *http.Request) *http.Request {
+	accessToken := r.extractToken(req)
+	if accessToken == "" {
+		return req
+	}
+
+	ctx := authtypes.NewContextWithAccessToken(req.Context(), accessToken)
+	return req.WithContext(ctx)
+}
+
+func (r *resolver) GetIdentity(req *http.Request) (*authtypes.Identity, error) {
+	ctx := req.Context()
+
+	accessToken, err := authtypes.AccessTokenFromContext(ctx)
+	if err != nil {
+		return nil, err
+	}
+
+	return r.tokenizer.GetIdentity(ctx, accessToken)
+}
+
+func (r *resolver) Post(ctx context.Context, _ *http.Request, _ authtypes.Claims) {
+	accessToken, err := authtypes.AccessTokenFromContext(ctx)
+	if err != nil {
+		return
+	}
+
+	_, _, _ = r.sfGroup.Do(accessToken, func() (any, error) {
+		if err := r.tokenizer.SetLastObservedAt(ctx, accessToken, time.Now()); err != nil {
+			r.settings.Logger().ErrorContext(ctx, "failed to set last observed at", "error", err)
+			return false, err
+		}
+		return true, nil
+	})
+}
+
+func (r *resolver) extractToken(req *http.Request) string {
+	var value string
+	for _, header := range r.headers {
+		if v := req.Header.Get(header); v != "" {
+			value = v
+			break
+		}
+	}
+
+	accessToken, ok := r.parseBearerAuth(value)
+	if !ok {
+		return value
+	}
+	return accessToken
+}
+
+func (r *resolver) parseBearerAuth(auth string) (string, bool) {
+	const prefix = "Bearer "
+	if len(auth) < len(prefix) || !strings.EqualFold(auth[:len(prefix)], prefix) {
+		return "", false
+	}
+	return auth[len(prefix):], true
+}
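parseBearerAuth does a case-insensitive prefix match, and extractToken falls back to the raw header value when the prefix is absent. A self-contained check of that behavior, with the parsing logic copied from above:

	package main

	import (
		"fmt"
		"strings"
	)

	func parseBearerAuth(auth string) (string, bool) {
		const prefix = "Bearer "
		if len(auth) < len(prefix) || !strings.EqualFold(auth[:len(prefix)], prefix) {
			return "", false
		}
		return auth[len(prefix):], true
	}

	func main() {
		for _, v := range []string{"Bearer abc123", "BEARER abc123", "abc123"} {
			token, ok := parseBearerAuth(v)
			fmt.Printf("%q -> token=%q matched=%v\n", v, token, ok)
		}
		// "abc123" does not match, so extractToken uses the raw value verbatim.
	}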
41
pkg/instrumentation/loghandler/filtering.go
Normal file
@@ -0,0 +1,41 @@
+package loghandler
+
+import (
+	"context"
+	"log/slog"
+
+	"github.com/SigNoz/signoz/pkg/errors"
+)
+
+type filtering struct{}
+
+func NewFiltering() *filtering {
+	return &filtering{}
+}
+
+func (h *filtering) Wrap(next LogHandler) LogHandler {
+	return LogHandlerFunc(func(ctx context.Context, record slog.Record) error {
+		if !filterRecord(record) {
+			return nil
+		}
+
+		return next.Handle(ctx, record)
+	})
+}
+
+func filterRecord(record slog.Record) bool {
+	suppress := false
+	record.Attrs(func(a slog.Attr) bool {
+		if a.Value.Kind() == slog.KindAny {
+			if err, ok := a.Value.Any().(error); ok {
+				if errors.Is(err, context.Canceled) {
+					suppress = true
+					return false
+				}
+			}
+		}
+		return true
+	})

+	return !suppress
+}
52
pkg/instrumentation/loghandler/filtering_test.go
Normal file
@@ -0,0 +1,52 @@
+package loghandler
+
+import (
+	"bytes"
+	"context"
+	"encoding/json"
+	"log/slog"
+	"testing"
+
+	"github.com/SigNoz/signoz/pkg/errors"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestFiltering_SuppressesContextCanceled(t *testing.T) {
+	filtering := NewFiltering()
+
+	buf := bytes.NewBuffer(nil)
+	logger := slog.New(&handler{base: slog.NewJSONHandler(buf, &slog.HandlerOptions{Level: slog.LevelDebug}), wrappers: []Wrapper{filtering}})
+
+	logger.ErrorContext(context.Background(), "operation failed", "error", context.Canceled)
+
+	assert.Empty(t, buf.String(), "log with context.Canceled should be suppressed")
+}
+
+func TestFiltering_AllowsOtherErrors(t *testing.T) {
+	filtering := NewFiltering()
+
+	buf := bytes.NewBuffer(nil)
+	logger := slog.New(&handler{base: slog.NewJSONHandler(buf, &slog.HandlerOptions{Level: slog.LevelDebug}), wrappers: []Wrapper{filtering}})
+
+	logger.ErrorContext(context.Background(), "operation failed", "error", errors.New(errors.TypeInternal, errors.CodeInternal, "some other error"))
+
+	m := make(map[string]any)
+	err := json.Unmarshal(buf.Bytes(), &m)
+	require.NoError(t, err)
+	assert.Equal(t, "operation failed", m["msg"])
+}
+
+func TestFiltering_AllowsLogsWithoutErrors(t *testing.T) {
+	filtering := NewFiltering()
+
+	buf := bytes.NewBuffer(nil)
+	logger := slog.New(&handler{base: slog.NewJSONHandler(buf, &slog.HandlerOptions{Level: slog.LevelDebug}), wrappers: []Wrapper{filtering}})
+
+	logger.InfoContext(context.Background(), "normal log", "key", "value")
+
+	m := make(map[string]any)
+	err := json.Unmarshal(buf.Bytes(), &m)
+	require.NoError(t, err)
+	assert.Equal(t, "normal log", m["msg"])
+}
@@ -116,7 +116,7 @@ func New(ctx context.Context, cfg Config, build version.Build, serviceName strin
 		meterProvider:             meterProvider,
 		meterProviderShutdownFunc: meterProviderShutdownFunc,
 		prometheusRegistry:        prometheusRegistry,
-		logger:                    NewLogger(cfg, loghandler.NewCorrelation()),
+		logger:                    NewLogger(cfg, loghandler.NewCorrelation(), loghandler.NewFiltering()),
 		startCh:                   make(chan struct{}),
 	}, nil
 }
@@ -15,7 +15,6 @@ import (
 	"github.com/SigNoz/signoz/pkg/transition"
 	"github.com/SigNoz/signoz/pkg/types"
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
-	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
 	"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
 	"github.com/SigNoz/signoz/pkg/valuer"
 	"github.com/gorilla/mux"
@@ -109,7 +108,7 @@ func (handler *handler) Update(rw http.ResponseWriter, r *http.Request) {

 	diff := 0
 	// Allow multiple deletions for API key requests; enforce for others
-	if authType, ok := ctxtypes.AuthTypeFromContext(ctx); ok && authType == ctxtypes.AuthTypeTokenizer {
+	if claims.IdentNProvider == authtypes.IdentNProviderTokenizer.StringValue() {
 		diff = 1
 	}
@@ -6,19 +6,20 @@ import (
	"fmt"
	"io"
	"net/http"
	"slices"
	"net/url"
	"strconv"
	"strings"
	"time"
	"unicode"
	"unicode/utf8"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/binding"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
	"github.com/SigNoz/signoz/pkg/telemetrylogs"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/exporttypes"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -30,31 +31,129 @@ func NewHandler(module rawdataexport.Module) rawdataexport.Handler {
	return &handler{module: module}
}

// ExportRawData handles data export requests.
//
// API Documentation:
// Endpoint: GET /api/v1/export_raw_data
//
// Query Parameters:
//
//   - source (optional): Type of data to export ["logs" (default), "metrics", "traces"]
//     Note: Currently only "logs" is fully supported
//
//   - format (optional): Output format ["csv" (default), "jsonl"]
//
//   - start (required): Start time for query (Unix timestamp in nanoseconds)
//
//   - end (required): End time for query (Unix timestamp in nanoseconds)
//
//   - limit (optional): Maximum number of rows to export
//     Constraints: Must be positive and cannot exceed MAX_EXPORT_ROW_COUNT_LIMIT
//
//   - filter (optional): Filter expression to apply to the query
//
//   - columns (optional): Specific columns to include in export
//     Default: all columns are returned
//     Format: ["context.field:type", "context.field", "field"]
//
//   - order_by (optional): Sorting specification ["column:direction" or "context.field:type:direction"]
//     Direction: "asc" or "desc"
//     Default: ["timestamp:desc", "id:desc"]
//
// Response Headers:
//   - Content-Type: "text/csv" or "application/x-ndjson"
//   - Content-Encoding: "gzip" (handled by HTTP middleware)
//   - Content-Disposition: "attachment; filename=\"data_exported.[format]\""
//   - Cache-Control: "no-cache"
//   - Vary: "Accept-Encoding"
//   - Transfer-Encoding: "chunked"
//   - Trailers: X-Response-Complete
//
// Response Format:
//
//	CSV: Headers in first row, data in subsequent rows
//	JSONL: One JSON object per line
//
// Example Usage:
//
//	Basic CSV export:
//	GET /api/v1/export_raw_data?start=1693612800000000000&end=1693699199000000000
//
//	Export with columns and format:
//	GET /api/v1/export_raw_data?start=1693612800000000000&end=1693699199000000000&format=jsonl
//	&columns=timestamp&columns=severity&columns=message
//
//	Export with filter and ordering:
//	GET /api/v1/export_raw_data?start=1693612800000000000&end=1693699199000000000
//	&filter=severity="error"&order_by=timestamp:desc&limit=1000
func (handler *handler) ExportRawData(rw http.ResponseWriter, r *http.Request) {
	var queryRangeRequest qbtypes.QueryRangeRequest

	var formatParam exporttypes.ExportRawDataFormatQueryParam
	if err := binding.Query.BindQuery(r.URL.Query(), &formatParam); err != nil {
		render.Error(rw, err)
		return
	}
	format := formatParam.Format
	if err := binding.JSON.BindBody(r.Body, &queryRangeRequest); err != nil {
		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid request body: %v", err))
		return
	}

	if err := validateSpecForExport(&queryRangeRequest); err != nil {
	source, err := getExportQuerySource(r.URL.Query())
	if err != nil {
		render.Error(rw, err)
		return
	}

	if err := validateAndApplyDefaultExportLimits(queryRangeRequest.CompositeQuery.Queries); err != nil {
	switch source {
	case "logs":
		handler.exportLogs(rw, r)
	case "traces":
		handler.exportTraces(rw, r)
	case "metrics":
		handler.exportMetrics(rw, r)
	default:
		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid source: must be logs"))
	}
}

func (handler *handler) exportMetrics(rw http.ResponseWriter, r *http.Request) {
	render.Error(rw, errors.Newf(errors.TypeUnsupported, errors.CodeUnsupported, "metrics export is not yet supported"))
}

func (handler *handler) exportTraces(rw http.ResponseWriter, r *http.Request) {
	render.Error(rw, errors.Newf(errors.TypeUnsupported, errors.CodeUnsupported, "traces export is not yet supported"))
}

func (handler *handler) exportLogs(rw http.ResponseWriter, r *http.Request) {
	// Set up response headers
	rw.Header().Set("Cache-Control", "no-cache")
	rw.Header().Set("Vary", "Accept-Encoding") // Indicate that response varies based on Accept-Encoding
	rw.Header().Set("Access-Control-Expose-Headers", "Content-Disposition, X-Response-Complete")
	rw.Header().Set("Trailer", "X-Response-Complete")
	rw.Header().Set("Transfer-Encoding", "chunked")

	queryParams := r.URL.Query()

	startTime, endTime, err := getExportQueryTimeRange(queryParams)
	if err != nil {
		render.Error(rw, err)
		return
	}

	queryRangeRequest.UseDefaultOrderBy()
	limit, err := getExportQueryLimit(queryParams)
	if err != nil {
		render.Error(rw, err)
		return
	}

	format, err := getExportQueryFormat(queryParams)
	if err != nil {
		render.Error(rw, err)
		return
	}

	// Set appropriate content type and filename
	filename := fmt.Sprintf("data_exported_%s.%s", time.Now().Format("2006-01-02_150405"), format)
	rw.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename))

	filterExpression := queryParams.Get("filter")

	orderByExpression, err := getExportQueryOrderBy(queryParams)
	if err != nil {
		render.Error(rw, err)
		return
	}

	columns := getExportQueryColumns(queryParams)

	claims, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
@@ -62,98 +161,76 @@ func (handler *handler) ExportRawData(rw http.ResponseWriter, r *http.Request) {
		return
	}

	orgID := valuer.MustNewUUID(claims.OrgID)
	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgID is invalid"))
		return
	}

	setExportResponseHeaders(rw, format)
	queryRangeRequest := qbtypes.QueryRangeRequest{
		Start:       startTime,
		End:         endTime,
		RequestType: qbtypes.RequestTypeRaw,
		CompositeQuery: qbtypes.CompositeQuery{
			Queries: []qbtypes.QueryEnvelope{
				{
					Type: qbtypes.QueryTypeBuilder,
					Spec: nil,
				},
			},
		},
	}

	spec := qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
		Signal: telemetrytypes.SignalLogs,
		Name:   "raw",
		Filter: &qbtypes.Filter{
			Expression: filterExpression,
		},
		Limit: limit,
		Order: orderByExpression,
	}

	spec.SelectFields = columns

	queryRangeRequest.CompositeQuery.Queries[0].Spec = spec

	// This will signal Export module to stop sending data
	doneChan := make(chan any)
	defer close(doneChan)
	rowChan, errChan := handler.module.ExportRawData(r.Context(), orgID, &queryRangeRequest, doneChan)

	isComplete, err := handler.executeExport(rowChan, errChan, format, rw)
	if err != nil {
		render.Error(rw, err)
		return
	}
	rw.Header().Set("X-Response-Complete", strconv.FormatBool(isComplete))
}
	var isComplete bool

// validateSpecForExport validates query specs
func validateSpecForExport(req *qbtypes.QueryRangeRequest) error {
	queries := req.CompositeQuery.Queries

	// If the trace operator query is not present, and there are multiple queries, return an error
	if req.TraceOperatorQueryIndex() == -1 && len(queries) > 1 {
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "multiple queries not allowed without a trace operator query")
	}

	for idx := range queries {
		switch spec := queries[idx].Spec.(type) {
		case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
			qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
			qbtypes.QueryBuilderTraceOperator:
			// Supported spec types
		default:
			return errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported query at index %d type: %T", idx, spec)
		}
	}

	opts := append(qbtypes.GetValidationOptions(req.RequestType), qbtypes.WithSkipLimitOffsetValidation())
	return req.Validate(opts...)
}

func validateAndApplyDefaultExportLimits(queries []qbtypes.QueryEnvelope) error {
	for idx := range queries {
		limit := queries[idx].GetLimit()
		if limit == 0 {
			limit = DefaultExportRowCountLimit
		} else if limit < 0 {
			return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be positive")
		} else if limit > MaxExportRowCountLimit {
			return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit cannot be more than %d", MaxExportRowCountLimit)
		}
		queries[idx].SetLimit(limit)
	}
	return nil
}

// setExportResponseHeaders sets common HTTP headers for export responses.
func setExportResponseHeaders(rw http.ResponseWriter, format string) {
	rw.Header().Set("Cache-Control", "no-cache")
	rw.Header().Set("Vary", "Accept-Encoding")
	rw.Header().Set("Access-Control-Expose-Headers", "Content-Disposition, X-Response-Complete")
	rw.Header().Set("Trailer", "X-Response-Complete")
	rw.Header().Set("Transfer-Encoding", "chunked")
	filename := fmt.Sprintf("data_exported_%s.%s", time.Now().Format("2006-01-02_150405"), format)
	rw.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename))
}

// executeExport streams data from rowChan to the response writer in the specified format.
func (handler *handler) executeExport(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, format string, rw http.ResponseWriter) (bool, error) {
	switch format {
	case "csv", "":
		rw.Header().Set("Content-Type", "text/csv")
		csvWriter := csv.NewWriter(rw)
		isComplete, err := handler.exportRawDataCSV(rowChan, errChan, csvWriter)
		isComplete, err = handler.exportLogsCSV(rowChan, errChan, csvWriter)
		if err != nil {
			return false, err
			render.Error(rw, err)
			return
		}
		csvWriter.Flush()
		return isComplete, nil
	case "jsonl":
		rw.Header().Set("Content-Type", "application/x-ndjson")
		return handler.exportRawDataJSONL(rowChan, errChan, rw)
		isComplete, err = handler.exportLogsJSONL(rowChan, errChan, rw)
		if err != nil {
			render.Error(rw, err)
			return
		}
	default:
		return false, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid format: must be csv or jsonl")
		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid format: must be csv or jsonl"))
		return
	}

	rw.Header().Set("X-Response-Complete", strconv.FormatBool(isComplete))
}

// exportRawDataCSV is a generic CSV export function that works with any raw data (logs, traces, etc.)
func (handler *handler) exportRawDataCSV(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, csvWriter *csv.Writer) (bool, error) {
func (handler *handler) exportLogsCSV(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, csvWriter *csv.Writer) (bool, error) {
	var header []string
	headerToIndexMapping := make(map[string]int)

	headerToIndexMapping := make(map[string]int, len(header))

	totalBytes := uint64(0)
	for {
@@ -191,8 +268,8 @@ func (handler *handler) exportRawDataCSV(rowChan <-chan *qbtypes.RawRow, errChan
	}
}

// exportRawDataJSONL is a generic JSONL export function that works with any raw data (logs, traces, etc.)
func (handler *handler) exportRawDataJSONL(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, writer io.Writer) (bool, error) {
func (handler *handler) exportLogsJSONL(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, writer io.Writer) (bool, error) {

	totalBytes := uint64(0)
	for {
		select {
@@ -200,11 +277,9 @@ func (handler *handler) exportRawDataJSONL(rowChan <-chan *qbtypes.RawRow, errCh
			if !ok {
				return true, nil
			}
			jsonBytes, err := json.Marshal(row.Data)
			if err != nil {
				return false, errors.NewUnexpectedf(errors.CodeInternal, "error marshaling JSON: %s", err)
			}
			totalBytes += uint64(len(jsonBytes)) + 1
			// Handle JSON format (JSONL - one object per line)
			jsonBytes, _ := json.Marshal(row.Data)
			totalBytes += uint64(len(jsonBytes)) + 1 // +1 for newline

			if _, err := writer.Write(jsonBytes); err != nil {
				return false, errors.NewUnexpectedf(errors.CodeInternal, "error writing JSON: %s", err)
@@ -224,33 +299,74 @@ func (handler *handler) exportRawDataJSONL(rowChan <-chan *qbtypes.RawRow, errCh
	}
}

// priorityColumns defines the columns that should appear first in the CSV output, in order.
var priorityColumns = []string{"timestamp", "id"}
func getExportQuerySource(queryParams url.Values) (string, error) {
	switch queryParams.Get("source") {
	case "logs", "":
		return "logs", nil
	case "metrics":
		return "metrics", errors.NewInvalidInputf(errors.CodeInvalidInput, "metrics export not yet supported")
	case "traces":
		return "traces", errors.NewInvalidInputf(errors.CodeInvalidInput, "traces export not yet supported")
	default:
		return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid source: must be logs, metrics or traces")
	}
}

func getExportQueryFormat(queryParams url.Values) (string, error) {
	switch queryParams.Get("format") {
	case "csv", "":
		return "csv", nil
	case "jsonl":
		return "jsonl", nil
	default:
		return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid format: must be csv or jsonl")
	}
}

func getExportQueryLimit(queryParams url.Values) (int, error) {

	limitStr := queryParams.Get("limit")
	if limitStr == "" {
		return DefaultExportRowCountLimit, nil
	} else {
		limit, err := strconv.Atoi(limitStr)
		if err != nil {
			return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid limit format: %s", err.Error())
		}
		if limit <= 0 {
			return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be positive")
		}
		if limit > MaxExportRowCountLimit {
			return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "limit cannot be more than %d", MaxExportRowCountLimit)
		}
		return limit, nil
	}
}

func getExportQueryTimeRange(queryParams url.Values) (uint64, uint64, error) {

	startTimeStr := queryParams.Get("start")
	endTimeStr := queryParams.Get("end")

	if startTimeStr == "" || endTimeStr == "" {
		return 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "start and end time are required")
	}
	startTime, err := strconv.ParseUint(startTimeStr, 10, 64)
	if err != nil {
		return 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid start time format: %s", err.Error())
	}
	endTime, err := strconv.ParseUint(endTimeStr, 10, 64)
	if err != nil {
		return 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid end time format: %s", err.Error())
	}
	return startTime, endTime, nil
}

func constructCSVHeaderFromQueryResponse(data map[string]any) []string {
	header := make([]string, 0, len(data))
	for key := range data {
		header = append(header, key)
	}
	// This is to ensure CSV output is consistent across multiple queries
	slices.SortFunc(header, func(a, b string) int {
		ai, bi := slices.Index(priorityColumns, a), slices.Index(priorityColumns, b)
		switch {
		case ai != -1 && bi != -1:
			return ai - bi
		case ai != -1:
			return -1
		case bi != -1:
			return 1
		default:
			if a < b {
				return -1
			} else if a > b {
				return 1
			}
			return 0
		}
	})
	return header
}
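A test-style sketch (same package) of the ordering constructCSVHeaderFromQueryResponse produces: priority columns first, in their declared order, then everything else alphabetically. The field names are illustrative only:

	data := map[string]any{"severity": "error", "body": "...", "id": "b1", "timestamp": 1}
	fmt.Println(constructCSVHeaderFromQueryResponse(data))
	// Output: [timestamp id body severity]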
@@ -311,12 +427,9 @@ func constructCSVRecordFromQueryResponse(data map[string]any, headerToIndexMappi
		valueStr = v.String()

	default:
		jsonBytes, err := json.Marshal(v)
		if err != nil {
			valueStr = fmt.Sprintf("%v", v)
		} else {
			valueStr = string(jsonBytes)
		}
		// For all other complex types (maps, structs, etc.)
		jsonBytes, _ := json.Marshal(v)
		valueStr = string(jsonBytes)
	}

	record[index] = sanitizeForCSV(valueStr)
@@ -325,6 +438,26 @@ func constructCSVRecordFromQueryResponse(data map[string]any, headerToIndexMappi
	return record
}

// getExportQueryColumns parses the "columns" query parameters and returns a slice of TelemetryFieldKey structs.
// Each column should be a valid telemetry field key in the format "context.field:type" or "context.field" or "field"
func getExportQueryColumns(queryParams url.Values) []telemetrytypes.TelemetryFieldKey {
	columnParams := queryParams["columns"]

	columns := make([]telemetrytypes.TelemetryFieldKey, 0, len(columnParams))

	for _, columnStr := range columnParams {
		// Skip empty strings
		columnStr = strings.TrimSpace(columnStr)
		if columnStr == "" {
			continue
		}

		columns = append(columns, telemetrytypes.GetFieldKeyFromKeyText(columnStr))
	}

	return columns
}

func getsizeOfStringSlice(slice []string) uint64 {
	var totalBytes uint64
	for _, str := range slice {
@@ -332,3 +465,52 @@ func getsizeOfStringSlice(slice []string) uint64 {
|
||||
}
|
||||
return totalBytes
|
||||
}
|
||||
|
||||
// getExportQueryOrderBy parses the "order_by" query parameters and returns a slice of OrderBy structs.
// Each "order_by" parameter should be in the format "column:direction"
// Each "column" should be a valid telemetry field key in the format "context.field:type" or "context.field" or "field"
func getExportQueryOrderBy(queryParams url.Values) ([]qbtypes.OrderBy, error) {
    orderByParam := queryParams.Get("order_by")

    orderByParam = strings.TrimSpace(orderByParam)
    if orderByParam == "" {
        return telemetrylogs.DefaultLogsV2SortingOrder, nil
    }

    parts := strings.Split(orderByParam, ":")
    if len(parts) != 2 && len(parts) != 3 {
        return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order_by format: %s, should be <column>:<direction>", orderByParam)
    }

    column := strings.Join(parts[:len(parts)-1], ":")
    direction := parts[len(parts)-1]

    orderDirection, ok := qbtypes.OrderDirectionMap[direction]
    if !ok {
        return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order_by direction: %s, should be one of %s, %s", direction, qbtypes.OrderDirectionAsc, qbtypes.OrderDirectionDesc)
    }

    orderByKey := telemetrytypes.GetFieldKeyFromKeyText(column)

    orderBy := []qbtypes.OrderBy{
        {
            Key: qbtypes.OrderByKey{
                TelemetryFieldKey: orderByKey,
            },
            Direction: orderDirection,
        },
    }

    // If we are ordering by the timestamp column, also order by the ID column
    if orderByKey.Name == telemetrylogs.LogsV2TimestampColumn {
        orderBy = append(orderBy, qbtypes.OrderBy{
            Key: qbtypes.OrderByKey{
                TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
                    Name: telemetrylogs.LogsV2IDColumn,
                },
            },
            Direction: orderDirection,
        })
    }
    return orderBy, nil
}

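Because a column may itself contain a colon ("context.field:type"), the parser above joins all but the last segment back into the column and treats only the final segment as the direction. A standalone sketch of that split, using the "attribute.user:string:desc" shape from the tests below:

```go
package main

import (
	"fmt"
	"strings"
)

func splitOrderBy(param string) (column, direction string, ok bool) {
	parts := strings.Split(param, ":")
	if len(parts) != 2 && len(parts) != 3 {
		return "", "", false
	}
	// Everything before the last ":" is the column; the tail is the direction.
	return strings.Join(parts[:len(parts)-1], ":"), parts[len(parts)-1], true
}

func main() {
	col, dir, _ := splitOrderBy("attribute.user:string:desc")
	fmt.Println(col, dir) // attribute.user:string desc

	col, dir, _ = splitOrderBy("timestamp:asc")
	fmt.Println(col, dir) // timestamp asc
}
```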
@@ -2,84 +2,58 @@ package implrawdataexport

import (
    "net/url"
    "strconv"
    "testing"

    "github.com/SigNoz/signoz/pkg/http/binding"
    "github.com/SigNoz/signoz/pkg/types/exporttypes"
    "github.com/SigNoz/signoz/pkg/telemetrylogs"
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/stretchr/testify/assert"
)

func TestExportRawDataFormatQueryParam_BindingDefaults(t *testing.T) {
    var params exporttypes.ExportRawDataFormatQueryParam
    err := binding.Query.BindQuery(url.Values{}, &params)
    assert.NoError(t, err)
    assert.Equal(t, "csv", params.Format)
}

func logQuery(limit int) qbtypes.QueryEnvelope {
    return qbtypes.QueryEnvelope{
        Type: qbtypes.QueryTypeBuilder,
        Spec: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{Limit: limit},
    }
}

func traceQuery(limit int) qbtypes.QueryEnvelope {
    return qbtypes.QueryEnvelope{
        Type: qbtypes.QueryTypeBuilder,
        Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{Limit: limit},
    }
}

func traceOperatorQuery(limit int) qbtypes.QueryEnvelope {
    return qbtypes.QueryEnvelope{
        Type: qbtypes.QueryTypeTraceOperator,
        Spec: qbtypes.QueryBuilderTraceOperator{Limit: limit, Expression: "A"},
    }
}

func makeRequest(queries ...qbtypes.QueryEnvelope) qbtypes.QueryRangeRequest {
    return qbtypes.QueryRangeRequest{
        Start:          1000000000000,
        End:            1000003600000,
        RequestType:    qbtypes.RequestTypeRaw,
        CompositeQuery: qbtypes.CompositeQuery{Queries: queries},
    }
}

func TestValidateSpecForExport(t *testing.T) {
func TestGetExportQuerySource(t *testing.T) {
    tests := []struct {
        name          string
        req           qbtypes.QueryRangeRequest
        expectedError bool
        name           string
        queryParams    url.Values
        expectedSource string
        expectedError  bool
    }{
        {
            name: "single log query",
            req:  makeRequest(logQuery(0)),
            name:           "default logs source",
            queryParams:    url.Values{},
            expectedSource: "logs",
            expectedError:  false,
        },
        {
            name: "single trace query",
            req:  makeRequest(traceQuery(0)),
            name:           "explicit logs source",
            queryParams:    url.Values{"source": {"logs"}},
            expectedSource: "logs",
            expectedError:  false,
        },
        {
            name: "trace operator alone",
            req:  makeRequest(traceOperatorQuery(0)),
            name:           "metrics source - not supported",
            queryParams:    url.Values{"source": {"metrics"}},
            expectedSource: "metrics",
            expectedError:  true,
        },
        {
            name:          "multiple queries without trace operator",
            req:           makeRequest(logQuery(0), traceQuery(0)),
            expectedError: true,
            name:           "traces source - not supported",
            queryParams:    url.Values{"source": {"traces"}},
            expectedSource: "traces",
            expectedError:  true,
        },
        {
            name:          "unsupported query type",
            req:           makeRequest(qbtypes.QueryEnvelope{Type: qbtypes.QueryTypeBuilder, Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{}}),
            expectedError: true,
            name:           "invalid source",
            queryParams:    url.Values{"source": {"invalid"}},
            expectedSource: "",
            expectedError:  true,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            err := validateSpecForExport(&tt.req)
            source, err := getExportQuerySource(tt.queryParams)
            assert.Equal(t, tt.expectedSource, source)
            if tt.expectedError {
                assert.Error(t, err)
            } else {
@@ -89,69 +63,456 @@ func TestValidateSpecForExport(t *testing.T) {
    }
}

func TestValidateAndApplyDefaultExportLimits(t *testing.T) {
func TestGetExportQueryFormat(t *testing.T) {
    tests := []struct {
        name          string
        queries       []qbtypes.QueryEnvelope
        expectedError bool
        checkQueries  func(t *testing.T, queries []qbtypes.QueryEnvelope)
        name           string
        queryParams    url.Values
        expectedFormat string
        expectedError  bool
    }{
        {
            name:    "single log query, zero limit gets default",
            queries: makeRequest(logQuery(0)).CompositeQuery.Queries,
            checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
                assert.Equal(t, DefaultExportRowCountLimit, q[0].GetLimit())
            },
            name:           "default csv format",
            queryParams:    url.Values{},
            expectedFormat: "csv",
            expectedError:  false,
        },
        {
            name:    "single log query, valid limit kept",
            queries: makeRequest(logQuery(1000)).CompositeQuery.Queries,
            checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
                assert.Equal(t, 1000, q[0].GetLimit())
            },
            name:           "explicit csv format",
            queryParams:    url.Values{"format": {"csv"}},
            expectedFormat: "csv",
            expectedError:  false,
        },
        {
            name:    "single log query, max limit kept",
            queries: makeRequest(logQuery(MaxExportRowCountLimit)).CompositeQuery.Queries,
            checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
                assert.Equal(t, MaxExportRowCountLimit, q[0].GetLimit())
            },
            name:           "jsonl format",
            queryParams:    url.Values{"format": {"jsonl"}},
            expectedFormat: "jsonl",
            expectedError:  false,
        },
        {
            name:    "single log query, limit exceeds max",
            queries: makeRequest(logQuery(MaxExportRowCountLimit + 1)).CompositeQuery.Queries,
            name:           "invalid format",
            queryParams:    url.Values{"format": {"xml"}},
            expectedFormat: "",
            expectedError:  true,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            format, err := getExportQueryFormat(tt.queryParams)
            assert.Equal(t, tt.expectedFormat, format)
            if tt.expectedError {
                assert.Error(t, err)
            } else {
                assert.NoError(t, err)
            }
        })
    }
}

func TestGetExportQueryLimit(t *testing.T) {
    tests := []struct {
        name          string
        queryParams   url.Values
        expectedLimit int
        expectedError bool
    }{
        {
            name:          "default limit",
            queryParams:   url.Values{},
            expectedLimit: DefaultExportRowCountLimit,
            expectedError: false,
        },
        {
            name:          "valid limit",
            queryParams:   url.Values{"limit": {"5000"}},
            expectedLimit: 5000,
            expectedError: false,
        },
        {
            name:          "maximum limit",
            queryParams:   url.Values{"limit": {strconv.Itoa(MaxExportRowCountLimit)}},
            expectedLimit: MaxExportRowCountLimit,
            expectedError: false,
        },
        {
            name:          "limit exceeds maximum",
            queryParams:   url.Values{"limit": {"100000"}},
            expectedLimit: 0,
            expectedError: true,
        },
        {
            name:    "single log query, negative limit",
            queries: makeRequest(logQuery(-1)).CompositeQuery.Queries,
            name:          "invalid limit format",
            queryParams:   url.Values{"limit": {"invalid"}},
            expectedLimit: 0,
            expectedError: true,
        },
        {
            name:    "single trace query, zero limit gets default",
            queries: makeRequest(traceQuery(0)).CompositeQuery.Queries,
            checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
                assert.Equal(t, DefaultExportRowCountLimit, q[0].GetLimit())
            name:          "negative limit",
            queryParams:   url.Values{"limit": {"-100"}},
            expectedLimit: 0,
            expectedError: true,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            limit, err := getExportQueryLimit(tt.queryParams)
            assert.Equal(t, tt.expectedLimit, limit)
            if tt.expectedError {
                assert.Error(t, err)
            } else {
                assert.NoError(t, err)
            }
        })
    }
}

func TestGetExportQueryTimeRange(t *testing.T) {
    tests := []struct {
        name              string
        queryParams       url.Values
        expectedStartTime uint64
        expectedEndTime   uint64
        expectedError     bool
    }{
        {
            name: "valid time range",
            queryParams: url.Values{
                "start": {"1640995200"},
                "end":   {"1641081600"},
            },
            expectedStartTime: 1640995200,
            expectedEndTime:   1641081600,
            expectedError:     false,
        },
        {
            name:          "missing start time",
            queryParams:   url.Values{"end": {"1641081600"}},
            expectedError: true,
        },
        {
            name:          "missing end time",
            queryParams:   url.Values{"start": {"1640995200"}},
            expectedError: true,
        },
        {
            name:          "missing both times",
            queryParams:   url.Values{},
            expectedError: true,
        },
        {
            name: "invalid start time format",
            queryParams: url.Values{
                "start": {"invalid"},
                "end":   {"1641081600"},
            },
            expectedError: true,
        },
        {
            name: "invalid end time format",
            queryParams: url.Values{
                "start": {"1640995200"},
                "end":   {"invalid"},
            },
            expectedError: true,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            startTime, endTime, err := getExportQueryTimeRange(tt.queryParams)
            if tt.expectedError {
                assert.Error(t, err)
            } else {
                assert.NoError(t, err)
                assert.Equal(t, tt.expectedStartTime, startTime)
                assert.Equal(t, tt.expectedEndTime, endTime)
            }
        })
    }
}

func TestGetExportQueryColumns(t *testing.T) {
    tests := []struct {
        name            string
        queryParams     url.Values
        expectedColumns []telemetrytypes.TelemetryFieldKey
    }{
        {
            name:            "no columns specified",
            queryParams:     url.Values{},
            expectedColumns: []telemetrytypes.TelemetryFieldKey{},
        },
        {
            name: "single column",
            queryParams: url.Values{
                "columns": {"timestamp"},
            },
            expectedColumns: []telemetrytypes.TelemetryFieldKey{
                {Name: "timestamp"},
            },
        },
        {
            name:    "trace operator alone, zero limit gets default",
            queries: makeRequest(traceOperatorQuery(0)).CompositeQuery.Queries,
            checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
                assert.Equal(t, DefaultExportRowCountLimit, q[0].GetLimit())
            name: "multiple columns",
            queryParams: url.Values{
                "columns": {"timestamp", "message", "level"},
            },
            expectedColumns: []telemetrytypes.TelemetryFieldKey{
                {Name: "timestamp"},
                {Name: "message"},
                {Name: "level"},
            },
        },
        {
            name: "empty column name (should be skipped)",
            queryParams: url.Values{
                "columns": {"timestamp", "", "level"},
            },
            expectedColumns: []telemetrytypes.TelemetryFieldKey{
                {Name: "timestamp"},
                {Name: "level"},
            },
        },
        {
            name: "whitespace column name (should be skipped)",
            queryParams: url.Values{
                "columns": {"timestamp", " ", "level"},
            },
            expectedColumns: []telemetrytypes.TelemetryFieldKey{
                {Name: "timestamp"},
                {Name: "level"},
            },
        },
        {
            name: "valid column name with data type",
            queryParams: url.Values{
                "columns": {"timestamp", "attribute.user:string", "level"},
            },
            expectedColumns: []telemetrytypes.TelemetryFieldKey{
                {Name: "timestamp"},
                {Name: "user", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString},
                {Name: "level"},
            },
        },
        {
            name: "valid column name with dot notation",
            queryParams: url.Values{
                "columns": {"timestamp", "attribute.user.string", "level"},
            },
            expectedColumns: []telemetrytypes.TelemetryFieldKey{
                {Name: "timestamp"},
                {Name: "user.string", FieldContext: telemetrytypes.FieldContextAttribute},
                {Name: "level"},
            },
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            err := validateAndApplyDefaultExportLimits(tt.queries)
            columns := getExportQueryColumns(tt.queryParams)
            assert.Equal(t, len(tt.expectedColumns), len(columns))
            for i, expectedCol := range tt.expectedColumns {
                assert.Equal(t, expectedCol, columns[i])
            }
        })
    }
}

func TestGetExportQueryOrderBy(t *testing.T) {
    tests := []struct {
        name          string
        queryParams   url.Values
        expectedOrder []qbtypes.OrderBy
        expectedError bool
    }{
        {
            name:        "no order specified",
            queryParams: url.Values{},
            expectedOrder: []qbtypes.OrderBy{
                {
                    Direction: qbtypes.OrderDirectionDesc,
                    Key: qbtypes.OrderByKey{
                        TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
                            Name: telemetrylogs.LogsV2TimestampColumn,
                        },
                    },
                },
                {
                    Direction: qbtypes.OrderDirectionDesc,
                    Key: qbtypes.OrderByKey{
                        TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
                            Name: telemetrylogs.LogsV2IDColumn,
                        },
                    },
                },
            },
            expectedError: false,
        },
        {
            name: "single order error, direction not specified",
            queryParams: url.Values{
                "order_by": {"timestamp"},
            },
            expectedOrder: nil,
            expectedError: true,
        },
        {
            name: "single order no error",
            queryParams: url.Values{
                "order_by": {"timestamp:asc"},
            },
            expectedOrder: []qbtypes.OrderBy{
                {
                    Direction: qbtypes.OrderDirectionAsc,
                    Key: qbtypes.OrderByKey{
                        TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
                            Name: telemetrylogs.LogsV2TimestampColumn,
                        },
                    },
                },
                {
                    Direction: qbtypes.OrderDirectionAsc,
                    Key: qbtypes.OrderByKey{
                        TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
                            Name: telemetrylogs.LogsV2IDColumn,
                        },
                    },
                },
            },
            expectedError: false,
        },
        {
            name: "multiple orders",
            queryParams: url.Values{
                "order_by": {"timestamp:asc", "body:desc", "id:asc"},
            },
            expectedOrder: []qbtypes.OrderBy{
                {
                    Direction: qbtypes.OrderDirectionAsc,
                    Key: qbtypes.OrderByKey{
                        TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
                            Name: telemetrylogs.LogsV2TimestampColumn,
                        },
                    },
                },
                {
                    Direction: qbtypes.OrderDirectionAsc,
                    Key: qbtypes.OrderByKey{
                        TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
                            Name: telemetrylogs.LogsV2IDColumn,
                        },
                    },
                },
            },
            expectedError: false,
        },
        {
            name: "empty order name (should be skipped)",
            queryParams: url.Values{
                "order_by": {"timestamp:asc", "", "id:asc"},
            },
            expectedOrder: []qbtypes.OrderBy{
                {
                    Direction: qbtypes.OrderDirectionAsc,
                    Key: qbtypes.OrderByKey{
                        TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
                            Name: telemetrylogs.LogsV2TimestampColumn,
                        },
                    },
                },
                {
                    Direction: qbtypes.OrderDirectionAsc,
                    Key: qbtypes.OrderByKey{
                        TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
                            Name: telemetrylogs.LogsV2IDColumn,
                        },
                    },
                },
            },
            expectedError: false,
        },
        {
            name: "whitespace order name (should be skipped)",
            queryParams: url.Values{
                "order_by": {"timestamp:asc", " ", "id:asc"},
            },
            expectedOrder: []qbtypes.OrderBy{
                {
                    Direction: qbtypes.OrderDirectionAsc,
                    Key: qbtypes.OrderByKey{
                        TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
                            Name: telemetrylogs.LogsV2TimestampColumn,
                        },
                    },
                },
                {
                    Direction: qbtypes.OrderDirectionAsc,
                    Key: qbtypes.OrderByKey{
                        TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
                            Name: telemetrylogs.LogsV2IDColumn,
                        },
                    },
                },
            },
            expectedError: false,
        },
        {
            name: "invalid order name (should error out)",
            queryParams: url.Values{
                "order_by": {"attributes.user:", "id:asc"},
            },
            expectedOrder: nil,
            expectedError: true,
        },
        {
            name: "valid order name (should be included)",
            queryParams: url.Values{
                "order_by": {"attribute.user:string:desc", "id:asc"},
            },
            expectedOrder: []qbtypes.OrderBy{
                {
                    Direction: qbtypes.OrderDirectionDesc,
                    Key: qbtypes.OrderByKey{
                        TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
                            Name:          "user",
                            FieldContext:  telemetrytypes.FieldContextAttribute,
                            FieldDataType: telemetrytypes.FieldDataTypeString,
                        },
                    },
                },
            },
            expectedError: false,
        },
        {
            name: "valid order name (should be included)",
            queryParams: url.Values{
                "order_by": {"attribute.user.string:desc", "id:asc"},
            },
            expectedOrder: []qbtypes.OrderBy{
                {
                    Direction: qbtypes.OrderDirectionDesc,
                    Key: qbtypes.OrderByKey{
                        TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
                            Name:         "user.string",
                            FieldContext: telemetrytypes.FieldContextAttribute,
                        },
                    },
                },
            },
            expectedError: false,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            order, err := getExportQueryOrderBy(tt.queryParams)
            if tt.expectedError {
                assert.Error(t, err)
            } else {
                assert.NoError(t, err)
                if tt.checkQueries != nil {
                    tt.checkQueries(t, tt.queries)
                assert.Equal(t, len(tt.expectedOrder), len(order))
                for i, expectedOrd := range tt.expectedOrder {
                    assert.Equal(t, expectedOrd, order[i])
                }
            }
        })
@@ -168,8 +529,13 @@ func TestConstructCSVHeaderFromQueryResponse(t *testing.T) {

    header := constructCSVHeaderFromQueryResponse(data)

    // Priority columns come first in order, then the rest alphabetically.
    assert.Equal(t, []string{"timestamp", "id", "level", "message"}, header)
    // Since map iteration order is not guaranteed, check that all expected keys are present
    expectedKeys := []string{"timestamp", "message", "level", "id"}
    assert.Equal(t, len(expectedKeys), len(header))

    for _, key := range expectedKeys {
        assert.Contains(t, header, key)
    }
}

func TestConstructCSVRecordFromQueryResponse(t *testing.T) {

@@ -28,18 +28,8 @@ func (m *Module) ExportRawData(ctx context.Context, orgID valuer.UUID, rangeRequ
        instrumentationtypes.CodeFunctionName: "ExportRawData",
    })

    traceOperatorQueryIndex := rangeRequest.TraceOperatorQueryIndex()

    queries := rangeRequest.CompositeQuery.Queries

    // If the trace operator query is present, mark the queries other than trace operator as disabled
    if traceOperatorQueryIndex > -1 {
        for idx := range len(queries) {
            if idx != traceOperatorQueryIndex {
                queries[idx].SetDisabled(true)
            }
        }
    }
    spec := rangeRequest.CompositeQuery.Queries[0].Spec.(qbtypes.QueryBuilderQuery[qbtypes.LogAggregation])
    rowCountLimit := spec.Limit

    rowChan := make(chan *qbtypes.RawRow, 1)
    errChan := make(chan error, 1)
@@ -53,62 +43,52 @@ func (m *Module) ExportRawData(ctx context.Context, orgID valuer.UUID, rangeRequ
        defer close(errChan)
        defer close(rowChan)

        if traceOperatorQueryIndex > -1 {
            // If the trace operator query is present, we need to export the data for the trace operator query only
            exportRawDataForSingleQuery(m.querier, contextWithTimeout, orgID, rangeRequest, rowChan, errChan, doneChan, traceOperatorQueryIndex)
        } else {
            // If the trace operator query is not present, we need to export the data for the first query only
            exportRawDataForSingleQuery(m.querier, contextWithTimeout, orgID, rangeRequest, rowChan, errChan, doneChan, 0)
        rowCount := 0

        for rowCount < rowCountLimit {
            spec.Limit = min(ChunkSize, rowCountLimit-rowCount)
            spec.Offset = rowCount

            rangeRequest.CompositeQuery.Queries[0].Spec = spec

            response, err := m.querier.QueryRange(contextWithTimeout, orgID, rangeRequest)
            if err != nil {
                errChan <- err
                return
            }

            newRowsCount := 0
            for _, result := range response.Data.Results {
                resultData, ok := result.(*qbtypes.RawData)
                if !ok {
                    errChan <- errors.NewInternalf(errors.CodeInternal, "expected RawData, got %T", result)
                    return
                }

                newRowsCount += len(resultData.Rows)
                for _, row := range resultData.Rows {
                    select {
                    case rowChan <- row:
                    case <-doneChan:
                        return
                    case <-ctx.Done():
                        errChan <- ctx.Err()
                        return
                    }
                }

            }

            // Break if we did not receive any new rows
            if newRowsCount == 0 {
                return
            }

            rowCount += newRowsCount

        }
    }()

    return rowChan, errChan

}

func exportRawDataForSingleQuery(querier querier.Querier, ctx context.Context, orgID valuer.UUID, rangeRequest *qbtypes.QueryRangeRequest, rowChan chan *qbtypes.RawRow, errChan chan error, doneChan chan any, queryIndex int) {

    queries := rangeRequest.CompositeQuery.Queries
    rowCountLimit := queries[queryIndex].GetLimit()
    rowCount := 0

    for rowCount < rowCountLimit {
        chunkSize := min(ChunkSize, rowCountLimit-rowCount)
        queries[queryIndex].SetLimit(chunkSize)
        queries[queryIndex].SetOffset(rowCount)

        response, err := querier.QueryRange(ctx, orgID, rangeRequest)
        if err != nil {
            errChan <- err
            return
        }

        newRowsCount := 0
        for _, result := range response.Data.Results {
            resultData, ok := result.(*qbtypes.RawData)
            if !ok {
                errChan <- errors.NewInternalf(errors.CodeInternal, "expected RawData, got %T", result)
                return
            }

            newRowsCount += len(resultData.Rows)
            for _, row := range resultData.Rows {
                select {
                case rowChan <- row:
                case <-doneChan:
                    return
                case <-ctx.Done():
                    errChan <- ctx.Err()
                    return
                }
            }
        }

        rowCount += newRowsCount

        // Stop if we received fewer rows than requested — no more data available
        if newRowsCount < chunkSize {
            return
        }
    }
}

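A consumer of ExportRawData would drain both channels until they close, since the exporter goroutine streams rows on rowChan and reports failures on errChan. A hedged sketch of that read loop (channel and row types follow the code above; the writeRow serializer is a hypothetical stand-in for a CSV/JSONL writer):

```go
package main

import (
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

// drainExport reads rows and errors until both channels are closed.
func drainExport(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, writeRow func(*qbtypes.RawRow)) error {
	var firstErr error
	for rowChan != nil || errChan != nil {
		select {
		case row, ok := <-rowChan:
			if !ok {
				rowChan = nil // closed: stop selecting on it
				continue
			}
			writeRow(row)
		case err, ok := <-errChan:
			if !ok {
				errChan = nil
				continue
			}
			if firstErr == nil {
				firstErr = err
			}
			fmt.Println("export error:", err)
		}
	}
	return firstErr
}
```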
@@ -158,7 +158,7 @@ func (module *module) CreateCallbackAuthNSession(ctx context.Context, authNProvi
        return "", errors.WithAdditionalf(err, "root user can only authenticate via password")
    }

    token, err := module.tokenizer.CreateToken(ctx, authtypes.NewIdentity(user.ID, user.OrgID, user.Email, user.Role), map[string]string{})
    token, err := module.tokenizer.CreateToken(ctx, authtypes.NewIdentity(user.ID, user.OrgID, user.Email, user.Role, authtypes.IdentNProviderTokenizer), map[string]string{})
    if err != nil {
        return "", err
    }

@@ -7,13 +7,14 @@ import (
    "strings"
    "time"

    "log/slog"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/query-service/model"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/types/opamptypes"
    "github.com/SigNoz/signoz/pkg/valuer"
    "go.uber.org/zap"
    "golang.org/x/exp/slices"
)

@@ -121,7 +122,7 @@ func (r *Repo) insertConfig(

    // allowing empty elements for logs - use case is deleting all pipelines
    if len(elements) == 0 && c.ElementType != opamptypes.ElementTypeLogPipelines {
        zap.L().Error("insert config called with no elements ", zap.String("ElementType", c.ElementType.StringValue()))
        slog.ErrorContext(ctx, "insert config called with no elements", "element_type", c.ElementType.StringValue())
        return errors.NewInvalidInputf(CodeConfigElementsRequired, "config must have atleast one element")
    }

@@ -129,13 +130,13 @@ func (r *Repo) insertConfig(
        // the version can not be set by the user, we want to auto-assign the versions
        // in a monotonically increasing order starting with 1. hence, we reject insert
        // requests with version anything other than 0. here, 0 indicates un-assigned
        zap.L().Error("invalid version assignment while inserting agent config", zap.Int("version", c.Version), zap.String("ElementType", c.ElementType.StringValue()))
        slog.ErrorContext(ctx, "invalid version assignment while inserting agent config", "version", c.Version, "element_type", c.ElementType.StringValue())
        return errors.NewInvalidInputf(errors.CodeInvalidInput, "user defined versions are not supported in the agent config")
    }

    configVersion, err := r.GetLatestVersion(ctx, orgId, c.ElementType)
    if err != nil && !errors.Ast(err, errors.TypeNotFound) {
        zap.L().Error("failed to fetch latest config version", zap.Error(err))
        slog.ErrorContext(ctx, "failed to fetch latest config version", "error", err)
        return err
    }

@@ -155,11 +156,11 @@ func (r *Repo) insertConfig(
            // Delete elements first, then version (to respect potential foreign key constraints)
            _, delErr := r.store.BunDB().NewDelete().Model(new(opamptypes.AgentConfigElement)).Where("version_id = ?", c.ID).Exec(ctx)
            if delErr != nil {
                zap.L().Error("failed to delete config elements during cleanup", zap.Error(delErr), zap.String("version_id", c.ID.String()))
                slog.ErrorContext(ctx, "failed to delete config elements during cleanup", "error", delErr, "version_id", c.ID.String())
            }
            _, delErr = r.store.BunDB().NewDelete().Model(new(opamptypes.AgentConfigVersion)).Where("id = ?", c.ID).Where("org_id = ?", orgId).Exec(ctx)
            if delErr != nil {
                zap.L().Error("failed to delete config version during cleanup", zap.Error(delErr), zap.String("version_id", c.ID.String()))
                slog.ErrorContext(ctx, "failed to delete config version during cleanup", "error", delErr, "version_id", c.ID.String())
            }
        }
    }()
@@ -170,7 +171,7 @@ func (r *Repo) insertConfig(
        Model(c).
        Exec(ctx)
    if dbErr != nil {
        zap.L().Error("error in inserting config version: ", zap.Error(dbErr))
        slog.ErrorContext(ctx, "error in inserting config version", "error", dbErr)
        return errors.WrapInternalf(dbErr, CodeConfigVersionInsertFailed, "failed to insert config version")
    }

@@ -221,7 +222,7 @@ func (r *Repo) updateDeployStatus(ctx context.Context,
        Where("org_id = ?", orgId).
        Exec(ctx)
    if err != nil {
        zap.L().Error("failed to update deploy status", zap.Error(err))
        slog.ErrorContext(ctx, "failed to update deploy status", "error", err)
        return model.BadRequest(fmt.Errorf("failed to update deploy status"))
    }

@@ -239,7 +240,7 @@ func (r *Repo) updateDeployStatusByHash(
        Where("org_id = ?", orgId).
        Exec(ctx)
    if err != nil {
        zap.L().Error("failed to update deploy status", zap.Error(err))
        slog.ErrorContext(ctx, "failed to update deploy status", "error", err)
        return errors.WrapInternalf(err, CodeConfigDeployStatusUpdateFailed, "failed to update deploy status")
    }

@@ -4,6 +4,7 @@ import (
    "context"
    "crypto/sha256"
    "fmt"
    "log/slog"
    "strings"
    "sync"
    "sync/atomic"
@@ -17,7 +18,6 @@ import (
    "github.com/SigNoz/signoz/pkg/types/opamptypes"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/google/uuid"
    "go.uber.org/zap"
    yaml "gopkg.in/yaml.v3"
)

@@ -36,7 +36,8 @@ type AgentFeatureType string
type Manager struct {
    Repo
    // lock to make sure only one update is sent to remote agents at a time
    lock uint32
    lock   uint32
    logger *slog.Logger

    // For AgentConfigProvider implementation
    agentFeatures []AgentFeature
@@ -67,6 +68,7 @@ func Initiate(options *ManagerOptions) (*Manager, error) {

    m = &Manager{
        Repo:              Repo{options.Store},
        logger:            slog.Default(),
        agentFeatures:     options.AgentFeatures,
        configSubscribers: map[string]func(){},
    }
@@ -222,19 +224,19 @@ func NotifyConfigUpdate(ctx context.Context) {
func Redeploy(ctx context.Context, orgId valuer.UUID, typ opamptypes.ElementType, version int) error {
    configVersion, err := GetConfigVersion(ctx, orgId, typ, version)
    if err != nil {
        zap.L().Error("failed to fetch config version during redeploy", zap.Error(err))
        slog.ErrorContext(ctx, "failed to fetch config version during redeploy", "error", err)
        return err
    }

    if configVersion == nil || (configVersion != nil && configVersion.Config == "") {
        zap.L().Debug("config version has no conf yaml", zap.Any("configVersion", configVersion))
        slog.DebugContext(ctx, "config version has no conf yaml", "config_version", configVersion)
        return errors.NewInvalidInputf(CodeConfigVersionNoConfig, "the config version can not be redeployed")
    }
    switch typ {
    case opamptypes.ElementTypeSamplingRules:
        var config *tsp.Config
        if err := yaml.Unmarshal([]byte(configVersion.Config), &config); err != nil {
            zap.L().Debug("failed to read last conf correctly", zap.Error(err))
            slog.DebugContext(ctx, "failed to read last conf correctly", "error", err)
            return model.BadRequest(fmt.Errorf("failed to read the stored config correctly"))
        }

@@ -246,7 +248,7 @@ func Redeploy(ctx context.Context, orgId valuer.UUID, typ opamptypes.ElementType
        opamp.AddToTracePipelineSpec("signoz_tail_sampling")
        configHash, err := opamp.UpsertControlProcessors(ctx, "traces", processorConf, m.OnConfigUpdate)
        if err != nil {
            zap.L().Error("failed to call agent config update for trace processor", zap.Error(err))
            slog.ErrorContext(ctx, "failed to call agent config update for trace processor", "error", err)
            return errors.WithAdditionalf(err, "failed to deploy the config")
        }

@@ -254,7 +256,7 @@ func Redeploy(ctx context.Context, orgId valuer.UUID, typ opamptypes.ElementType
    case opamptypes.ElementTypeDropRules:
        var filterConfig *filterprocessor.Config
        if err := yaml.Unmarshal([]byte(configVersion.Config), &filterConfig); err != nil {
            zap.L().Error("failed to read last conf correctly", zap.Error(err))
            slog.ErrorContext(ctx, "failed to read last conf correctly", "error", err)
            return model.InternalError(fmt.Errorf("failed to read the stored config correctly"))
        }
        processorConf := map[string]interface{}{
@@ -264,7 +266,7 @@ func Redeploy(ctx context.Context, orgId valuer.UUID, typ opamptypes.ElementType
        opamp.AddToMetricsPipelineSpec("filter")
        configHash, err := opamp.UpsertControlProcessors(ctx, "metrics", processorConf, m.OnConfigUpdate)
        if err != nil {
            zap.L().Error("failed to call agent config update for trace processor", zap.Error(err))
            slog.ErrorContext(ctx, "failed to call agent config update for trace processor", "error", err)
            return err
        }

@@ -290,13 +292,13 @@ func UpsertFilterProcessor(ctx context.Context, orgId valuer.UUID, version int,
    opamp.AddToMetricsPipelineSpec("filter")
    configHash, err := opamp.UpsertControlProcessors(ctx, "metrics", processorConf, m.OnConfigUpdate)
    if err != nil {
        zap.L().Error("failed to call agent config update for trace processor", zap.Error(err))
        slog.ErrorContext(ctx, "failed to call agent config update for trace processor", "error", err)
        return err
    }

    processorConfYaml, yamlErr := yaml.Marshal(config)
    if yamlErr != nil {
        zap.L().Warn("unexpected error while transforming processor config to yaml", zap.Error(yamlErr))
        slog.WarnContext(ctx, "unexpected error while transforming processor config to yaml", "error", yamlErr)
    }

    m.updateDeployStatus(ctx, orgId, opamptypes.ElementTypeDropRules, version, opamptypes.DeployInitiated.StringValue(), "Deployment started", configHash, string(processorConfYaml))
@@ -315,7 +317,7 @@ func (m *Manager) OnConfigUpdate(orgId valuer.UUID, agentId string, hash string,
    message := "Deployment was successful"

    defer func() {
        zap.L().Info(status, zap.String("agentId", agentId), zap.String("agentResponse", message))
        m.logger.Info(status, "agent_id", agentId, "agent_response", message)
    }()

    if err != nil {
@@ -341,13 +343,13 @@ func UpsertSamplingProcessor(ctx context.Context, orgId valuer.UUID, version int
    opamp.AddToTracePipelineSpec("signoz_tail_sampling")
    configHash, err := opamp.UpsertControlProcessors(ctx, "traces", processorConf, m.OnConfigUpdate)
    if err != nil {
        zap.L().Error("failed to call agent config update for trace processor", zap.Error(err))
        slog.ErrorContext(ctx, "failed to call agent config update for trace processor", "error", err)
        return err
    }

    processorConfYaml, yamlErr := yaml.Marshal(config)
    if yamlErr != nil {
        zap.L().Warn("unexpected error while transforming processor config to yaml", zap.Error(yamlErr))
        slog.WarnContext(ctx, "unexpected error while transforming processor config to yaml", "error", yamlErr)
    }

    m.updateDeployStatus(ctx, orgId, opamptypes.ElementTypeSamplingRules, version, opamptypes.DeployInitiated.StringValue(), "Deployment started", configHash, string(processorConfYaml))

@@ -11,7 +11,6 @@ import (
    "github.com/SigNoz/signoz-otel-collector/utils/fingerprint"
    "github.com/SigNoz/signoz/pkg/query-service/model"
    v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
    "go.uber.org/zap"
)

func (r *ClickHouseReader) GetQBFilterSuggestionsForLogs(
@@ -79,7 +78,7 @@ func (r *ClickHouseReader) GetQBFilterSuggestionsForLogs(
    )
    if err != nil {
        // Do not fail the entire request if only example query generation fails
        zap.L().Error("could not find attribute values for creating example query", zap.Error(err))
        r.logger.ErrorContext(ctx, "could not find attribute values for creating example query", "error", err)
    } else {

        // add example queries for as many attributes as possible.
@@ -159,10 +158,7 @@ func (r *ClickHouseReader) getValuesForLogAttributes(
    */

    if len(attributes) > 10 {
        zap.L().Error(
            "log attribute values requested for too many attributes. This can lead to slow and costly queries",
            zap.Int("count", len(attributes)),
        )
        r.logger.ErrorContext(ctx, "log attribute values requested for too many attributes. This can lead to slow and costly queries", "count", len(attributes))
        attributes = attributes[:10]
    }

@@ -187,7 +183,7 @@ func (r *ClickHouseReader) getValuesForLogAttributes(

    rows, err := r.db.Query(ctx, query, tagKeyQueryArgs...)
    if err != nil {
        zap.L().Error("couldn't query attrib values for suggestions", zap.Error(err))
        r.logger.ErrorContext(ctx, "couldn't query attrib values for suggestions", "error", err)
        return nil, model.InternalError(fmt.Errorf(
            "couldn't query attrib values for suggestions: %w", err,
        ))

@@ -2,17 +2,18 @@ package queryprogress

import (
    "fmt"
    "log/slog"
    "sync"

    "github.com/ClickHouse/clickhouse-go/v2"
    "github.com/SigNoz/signoz/pkg/query-service/model"
    "github.com/google/uuid"
    "go.uber.org/zap"
    "golang.org/x/exp/maps"
)

// tracks progress and manages subscriptions for all queries
type inMemoryQueryProgressTracker struct {
    logger  *slog.Logger
    queries map[string]*queryTracker
    lock    sync.RWMutex
}
@@ -30,7 +31,7 @@ func (tracker *inMemoryQueryProgressTracker) ReportQueryStarted(
        ))
    }

    tracker.queries[queryId] = newQueryTracker(queryId)
    tracker.queries[queryId] = newQueryTracker(tracker.logger, queryId)

    return func() {
        tracker.onQueryFinished(queryId)
@@ -93,6 +94,7 @@ func (tracker *inMemoryQueryProgressTracker) getQueryTracker(

// Tracks progress and manages subscriptions for a single query
type queryTracker struct {
    logger     *slog.Logger
    queryId    string
    isFinished bool

@@ -102,8 +104,9 @@ type queryTracker struct {
    lock sync.Mutex
}

func newQueryTracker(queryId string) *queryTracker {
func newQueryTracker(logger *slog.Logger, queryId string) *queryTracker {
    return &queryTracker{
        logger:        logger,
        queryId:       queryId,
        subscriptions: map[string]*queryProgressSubscription{},
    }
@@ -114,10 +117,7 @@ func (qt *queryTracker) handleProgressUpdate(p *clickhouse.Progress) {
    defer qt.lock.Unlock()

    if qt.isFinished {
        zap.L().Warn(
            "received clickhouse progress update for finished query",
            zap.String("queryId", qt.queryId), zap.Any("progress", p),
        )
        qt.logger.Warn("received clickhouse progress update for finished query", "queryId", qt.queryId, "progress", p)
        return
    }

@@ -146,7 +146,7 @@ func (qt *queryTracker) subscribe() (
    }

    subscriberId := uuid.NewString()
    subscription := newQueryProgressSubscription()
    subscription := newQueryProgressSubscription(qt.logger)
    qt.subscriptions[subscriberId] = subscription

    if qt.progress != nil {
@@ -163,11 +163,7 @@ func (qt *queryTracker) unsubscribe(subscriberId string) {
    defer qt.lock.Unlock()

    if qt.isFinished {
        zap.L().Debug(
            "received unsubscribe request after query finished",
            zap.String("subscriber", subscriberId),
            zap.String("queryId", qt.queryId),
        )
        qt.logger.Debug("received unsubscribe request after query finished", "subscriber", subscriberId, "queryId", qt.queryId)
        return
    }

@@ -183,10 +179,7 @@ func (qt *queryTracker) onFinished() {
    defer qt.lock.Unlock()

    if qt.isFinished {
        zap.L().Warn(
            "receiver query finish report after query finished",
            zap.String("queryId", qt.queryId),
        )
        qt.logger.Warn("receiver query finish report after query finished", "queryId", qt.queryId)
        return
    }

@@ -199,15 +192,17 @@ func (qt *queryTracker) onFinished() {
}

type queryProgressSubscription struct {
    logger   *slog.Logger
    ch       chan model.QueryProgress
    isClosed bool
    lock     sync.Mutex
}

func newQueryProgressSubscription() *queryProgressSubscription {
func newQueryProgressSubscription(logger *slog.Logger) *queryProgressSubscription {
    ch := make(chan model.QueryProgress, 1000)
    return &queryProgressSubscription{
        ch: ch,
        logger: logger,
        ch:     ch,
    }
}

@@ -217,10 +212,7 @@ func (ch *queryProgressSubscription) send(progress model.QueryProgress) {
    defer ch.lock.Unlock()

    if ch.isClosed {
        zap.L().Error(
            "can't send query progress: channel already closed.",
            zap.Any("progress", progress),
        )
        ch.logger.Error("can't send query progress: channel already closed.", "progress", progress)
        return
    }

@@ -228,12 +220,9 @@ func (ch *queryProgressSubscription) send(progress model.QueryProgress) {
    // blocking while sending doesn't happen in the happy path
    select {
    case ch.ch <- progress:
        zap.L().Debug("published query progress", zap.Any("progress", progress))
        ch.logger.Debug("published query progress", "progress", progress)
    default:
        zap.L().Error(
            "couldn't publish query progress. dropping update.",
            zap.Any("progress", progress),
        )
        ch.logger.Error("couldn't publish query progress. dropping update.", "progress", progress)
    }
}

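The send method above trades delivery guarantees for liveness: the subscription channel is buffered (1000 entries), and when the buffer is full the update is dropped instead of blocking the ClickHouse progress callback. A minimal standalone illustration of that non-blocking publish pattern:

```go
package main

import "fmt"

func main() {
	// A small buffered channel stands in for a subscription's ch.
	updates := make(chan int, 2)

	publish := func(v int) {
		select {
		case updates <- v:
			fmt.Println("published", v)
		default:
			// Buffer full: drop rather than block the producer.
			fmt.Println("dropped", v)
		}
	}

	publish(1)
	publish(2)
	publish(3) // dropped: the buffer already holds 1 and 2
}
```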
@@ -1,6 +1,8 @@
package queryprogress

import (
    "log/slog"

    "github.com/ClickHouse/clickhouse-go/v2"
    "github.com/SigNoz/signoz/pkg/query-service/model"
)
@@ -21,10 +23,11 @@ type QueryProgressTracker interface {
    SubscribeToQueryProgress(queryId string) (ch <-chan model.QueryProgress, unsubscribe func(), apiErr *model.ApiError)
}

func NewQueryProgressTracker() QueryProgressTracker {
func NewQueryProgressTracker(logger *slog.Logger) QueryProgressTracker {
    // InMemory tracker is useful only for single replica query service setups.
    // Multi replica setups must use a centralized store for tracking and subscribing to query progress
    return &inMemoryQueryProgressTracker{
        logger:  logger,
        queries: map[string]*queryTracker{},
    }
}

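Putting the interface to work, a caller constructs the tracker and subscribes to a query's progress stream; this sketch follows the SubscribeToQueryProgress signature visible in the diff but assumes it runs inside (or imports) the queryprogress package, and elides the ReportQueryStarted setup that registers the query:

```go
package queryprogress

import (
	"fmt"
	"log/slog"
)

func exampleSubscribe() {
	tracker := NewQueryProgressTracker(slog.Default())

	// Assumes "query-1" was previously registered via ReportQueryStarted.
	ch, unsubscribe, apiErr := tracker.SubscribeToQueryProgress("query-1")
	if apiErr != nil {
		fmt.Println("subscribe failed:", apiErr)
		return
	}
	defer unsubscribe()

	// Receive updates until the subscription channel is closed.
	for progress := range ch {
		fmt.Printf("progress: %+v\n", progress)
	}
}
```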
@@ -1,6 +1,7 @@
package queryprogress

import (
    "log/slog"
    "testing"
    "time"

@@ -12,7 +13,7 @@ import (
func TestQueryProgressTracking(t *testing.T) {
    require := require.New(t)

    tracker := NewQueryProgressTracker()
    tracker := NewQueryProgressTracker(slog.Default())

    testQueryId := "test-query"

File diff suppressed because it is too large
@@ -13,6 +13,7 @@ import (
    "github.com/SigNoz/signoz/pkg/queryparser"

    "io"
    "log/slog"
    "math"
    "net/http"
    "regexp"
@@ -73,8 +74,6 @@ import (
    "github.com/SigNoz/signoz/pkg/types/ruletypes"
    traceFunnels "github.com/SigNoz/signoz/pkg/types/tracefunneltypes"

    "go.uber.org/zap"

    "github.com/SigNoz/signoz/pkg/query-service/app/integrations/messagingQueues/kafka"
    "github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
    "github.com/SigNoz/signoz/pkg/query-service/interfaces"
@@ -97,6 +96,7 @@ func NewRouter() *mux.Router {

// APIHandler implements the query service public API
type APIHandler struct {
    logger      *slog.Logger
    reader      interfaces.Reader
    ruleManager *rules.Manager
    querier     interfaces.Querier
@@ -212,6 +212,7 @@ func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, erro
    //quickFilterModule := quickfilter.NewAPI(opts.QuickFilterModule)

    aH := &APIHandler{
        logger:         slog.Default(),
        reader:         opts.Reader,
        temporalityMap: make(map[string]map[v3.Temporality]bool),
        ruleManager:    opts.RuleManager,
@@ -251,13 +252,13 @@ func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, erro
    // TODO(nitya): remote this in later for multitenancy.
    orgs, err := opts.Signoz.Modules.OrgGetter.ListByOwnedKeyRange(context.Background())
    if err != nil {
        zap.L().Warn("unexpected error while fetching orgs while initializing base api handler", zap.Error(err))
        aH.logger.Warn("unexpected error while fetching orgs while initializing base api handler", "error", err)
    }
    // if the first org with the first user is created then the setup is complete.
    if len(orgs) == 1 {
        count, err := opts.Signoz.Modules.UserGetter.CountByOrgID(context.Background(), orgs[0].ID)
        if err != nil {
            zap.L().Warn("unexpected error while fetch user count while initializing base api handler", zap.Error(err))
            aH.logger.Warn("unexpected error while fetching user count while initializing base api handler", "error", err)
        }

        if count > 0 {
@@ -312,7 +313,7 @@ func RespondError(w http.ResponseWriter, apiErr model.BaseApiError, data interfa
        Data: data,
    })
    if err != nil {
        zap.L().Error("error marshalling json response", zap.Error(err))
        slog.Error("error marshalling json response", "error", err)
        http.Error(w, err.Error(), http.StatusInternalServerError)
        return
    }
@@ -344,7 +345,7 @@ func RespondError(w http.ResponseWriter, apiErr model.BaseApiError, data interfa
    w.Header().Set("Content-Type", "application/json")
    w.WriteHeader(code)
    if n, err := w.Write(b); err != nil {
        zap.L().Error("error writing response", zap.Int("bytesWritten", n), zap.Error(err))
        slog.Error("error writing response", "bytes_written", n, "error", err)
    }
}

@@ -356,7 +357,7 @@ func writeHttpResponse(w http.ResponseWriter, data interface{}) {
        Data: data,
    })
    if err != nil {
        zap.L().Error("error marshalling json response", zap.Error(err))
        slog.Error("error marshalling json response", "error", err)
        http.Error(w, err.Error(), http.StatusInternalServerError)
        return
    }
@@ -364,7 +365,7 @@ func writeHttpResponse(w http.ResponseWriter, data interface{}) {
    w.Header().Set("Content-Type", "application/json")
    w.WriteHeader(http.StatusOK)
    if n, err := w.Write(b); err != nil {
        zap.L().Error("error writing response", zap.Int("bytesWritten", n), zap.Error(err))
        slog.Error("error writing response", "bytes_written", n, "error", err)
    }
}

@@ -574,6 +575,9 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
        aH.LicensingAPI.Activate(rw, req)
    })).Methods(http.MethodGet)

    // Export
    router.HandleFunc("/api/v1/export_raw_data", am.ViewAccess(aH.Signoz.Handlers.RawDataExport.ExportRawData)).Methods(http.MethodGet)

    router.HandleFunc("/api/v1/span_percentile", am.ViewAccess(aH.Signoz.Handlers.SpanPercentile.GetSpanPercentileDetails)).Methods(http.MethodPost)

    // Query Filter Analyzer api used to extract metric names and grouping columns from a query
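Taken together with the parameter helpers earlier in this diff, the new GET route above accepts format, limit, start, end, columns, and order_by as query parameters. A hypothetical invocation sketch (host, scheme, and the epoch values are assumptions; auth is elided):

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Parameter names come from the parsers earlier in this diff;
	// the values below are illustrative only.
	q := url.Values{}
	q.Set("format", "csv") // or "jsonl"
	q.Set("limit", "5000") // capped at MaxExportRowCountLimit
	q.Set("start", "1640995200")
	q.Set("end", "1641081600")
	q.Set("order_by", "timestamp:desc")
	q.Add("columns", "timestamp")
	q.Add("columns", "attribute.user:string")

	u := url.URL{
		Scheme:   "https",
		Host:     "signoz.example.com", // placeholder host
		Path:     "/api/v1/export_raw_data",
		RawQuery: q.Encode(),
	}
	fmt.Println(u.String())
}
```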
@@ -933,14 +937,14 @@ func (aH *APIHandler) metaForLinks(ctx context.Context, rule *ruletypes.Gettable
        }
        keys = model.GetLogFieldsV3(ctx, params, logFields)
    } else {
        zap.L().Error("failed to get log fields using empty keys; the link might not work as expected", zap.Error(apiErr))
        aH.logger.ErrorContext(ctx, "failed to get log fields using empty keys", "error", apiErr)
    }
} else if rule.AlertType == ruletypes.AlertTypeTraces {
    traceFields, err := aH.reader.GetSpanAttributeKeysByNames(ctx, logsv3.GetFieldNames(rule.PostableRule.RuleCondition.CompositeQuery))
    if err == nil {
        keys = traceFields
    } else {
        zap.L().Error("failed to get span attributes using empty keys; the link might not work as expected", zap.Error(err))
        aH.logger.ErrorContext(ctx, "failed to get span attributes using empty keys", "error", err)
    }
}

@@ -1273,14 +1277,14 @@ func (aH *APIHandler) List(rw http.ResponseWriter, r *http.Request) {

    installedIntegrationDashboards, apiErr := aH.IntegrationsController.GetDashboardsForInstalledIntegrations(ctx, orgID)
    if apiErr != nil {
        zap.L().Error("failed to get dashboards for installed integrations", zap.Error(apiErr))
        aH.logger.ErrorContext(ctx, "failed to get dashboards for installed integrations", "error", apiErr)
    } else {
        dashboards = append(dashboards, installedIntegrationDashboards...)
    }

    cloudIntegrationDashboards, apiErr := aH.CloudIntegrationsController.AvailableDashboards(ctx, orgID)
    if apiErr != nil {
        zap.L().Error("failed to get dashboards for cloud integrations", zap.Error(apiErr))
        aH.logger.ErrorContext(ctx, "failed to get dashboards for cloud integrations", "error", apiErr)
    } else {
        dashboards = append(dashboards, cloudIntegrationDashboards...)
    }
@@ -1322,7 +1326,7 @@ func (aH *APIHandler) testRule(w http.ResponseWriter, r *http.Request) {
    defer r.Body.Close()
    body, err := io.ReadAll(r.Body)
    if err != nil {
        zap.L().Error("Error in getting req body in test rule API", zap.Error(err))
        aH.logger.ErrorContext(r.Context(), "error reading request body for test rule", "error", err)
        RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
        return
    }
@@ -1374,7 +1378,7 @@ func (aH *APIHandler) patchRule(w http.ResponseWriter, r *http.Request) {
    defer r.Body.Close()
    body, err := io.ReadAll(r.Body)
    if err != nil {
        zap.L().Error("error in getting req body of patch rule API\n", zap.Error(err))
        aH.logger.ErrorContext(r.Context(), "error reading request body for patch rule", "error", err)
        RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
        return
    }
@@ -1404,7 +1408,7 @@ func (aH *APIHandler) editRule(w http.ResponseWriter, r *http.Request) {
    defer r.Body.Close()
    body, err := io.ReadAll(r.Body)
    if err != nil {
        zap.L().Error("error in getting req body of edit rule API", zap.Error(err))
        aH.logger.ErrorContext(r.Context(), "error reading request body for edit rule", "error", err)
        RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
        return
    }
@@ -1429,7 +1433,7 @@ func (aH *APIHandler) createRule(w http.ResponseWriter, r *http.Request) {
    defer r.Body.Close()
    body, err := io.ReadAll(r.Body)
    if err != nil {
        zap.L().Error("Error in getting req body for create rule API", zap.Error(err))
        aH.logger.ErrorContext(r.Context(), "error reading request body for create rule", "error", err)
        RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
        return
    }
@@ -1453,7 +1457,7 @@ func (aH *APIHandler) queryRangeMetrics(w http.ResponseWriter, r *http.Request)
        return
    }

    // zap.L().Info(query, apiError)
    // TODO: add structured logging for query and apiError if needed

    ctx := r.Context()
    if to := r.FormValue("timeout"); to != "" {
@@ -1475,7 +1479,7 @@ func (aH *APIHandler) queryRangeMetrics(w http.ResponseWriter, r *http.Request)
    }

    if res.Err != nil {
        zap.L().Error("error in query range metrics", zap.Error(res.Err))
        aH.logger.ErrorContext(r.Context(), "error in query range metrics", "error", res.Err)
    }

    if res.Err != nil {
@@ -1508,7 +1512,7 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) {
        return
    }

    // zap.L().Info(query, apiError)
    // TODO: add structured logging for query and apiError if needed

    ctx := r.Context()
    if to := r.FormValue("timeout"); to != "" {
@@ -1530,7 +1534,7 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) {
    }

    if res.Err != nil {
        zap.L().Error("error in query range metrics", zap.Error(res.Err))
        aH.logger.ErrorContext(r.Context(), "error in query range metrics", "error", res.Err)
    }

    if res.Err != nil {
@@ -1633,7 +1637,7 @@ func (aH *APIHandler) getServicesTopLevelOps(w http.ResponseWriter, r *http.Requ
    var params topLevelOpsParams
    err := json.NewDecoder(r.Body).Decode(&params)
    if err != nil {
        zap.L().Error("Error in getting req body for get top operations API", zap.Error(err))
        aH.logger.ErrorContext(r.Context(), "error reading request body for get top operations", "error", err)
    }

    if params.Service != "" {
@@ -2055,7 +2059,7 @@ func (aH *APIHandler) HandleError(w http.ResponseWriter, err error, statusCode i
        return false
    }
    if statusCode == http.StatusInternalServerError {
        zap.L().Error("HTTP handler, Internal Server Error", zap.Error(err))
        aH.logger.Error("internal server error in http handler", "error", err)
    }
    structuredResp := structuredResponse{
        Errors: []structuredError{
@@ -2149,7 +2153,7 @@ func (aH *APIHandler) onboardProducers(
) {
    messagingQueue, apiErr := ParseKafkaQueueBody(r)
    if apiErr != nil {
        zap.L().Error(apiErr.Err.Error())
        aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
        RespondError(w, apiErr, nil)
        return
    }
@@ -2157,7 +2161,7 @@ func (aH *APIHandler) onboardProducers(
    chq, err := kafka.BuildClickHouseQuery(messagingQueue, kafka.KafkaQueue, "onboard_producers")

    if err != nil {
        zap.L().Error(err.Error())
        aH.logger.ErrorContext(r.Context(), "failed to build clickhouse query for onboard producers", "error", err)
        RespondError(w, apiErr, nil)
        return
    }
@@ -2251,7 +2255,7 @@ func (aH *APIHandler) onboardConsumers(
) {
    messagingQueue, apiErr := ParseKafkaQueueBody(r)
    if apiErr != nil {
        zap.L().Error(apiErr.Err.Error())
        aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
        RespondError(w, apiErr, nil)
        return
    }
@@ -2259,7 +2263,7 @@ func (aH *APIHandler) onboardConsumers(
    chq, err := kafka.BuildClickHouseQuery(messagingQueue, kafka.KafkaQueue, "onboard_consumers")

    if err != nil {
        zap.L().Error(err.Error())
        aH.logger.ErrorContext(r.Context(), "failed to build clickhouse query for onboard consumers", "error", err)
        RespondError(w, apiErr, nil)
        return
    }
@@ -2398,7 +2402,7 @@ func (aH *APIHandler) onboardKafka(w http.ResponseWriter, r *http.Request) {

    messagingQueue, apiErr := ParseKafkaQueueBody(r)
    if apiErr != nil {
        zap.L().Error(apiErr.Err.Error())
        aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
        RespondError(w, apiErr, nil)
        return
    }
@@ -2406,7 +2410,7 @@ func (aH *APIHandler) onboardKafka(w http.ResponseWriter, r *http.Request) {
    queryRangeParams, err := kafka.BuildBuilderQueriesKafkaOnboarding(messagingQueue)

    if err != nil {
        zap.L().Error(err.Error())
        aH.logger.ErrorContext(r.Context(), "failed to build kafka onboarding queries", "error", err)
        RespondError(w, apiErr, nil)
        return
    }
@@ -2508,19 +2512,19 @@ func (aH *APIHandler) getNetworkData(w http.ResponseWriter, r *http.Request) {
	messagingQueue, apiErr := ParseKafkaQueueBody(r)

	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
		RespondError(w, apiErr, nil)
		return
	}

	queryRangeParams, err := kafka.BuildQRParamsWithCache(messagingQueue, "throughput", attributeCache)
	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for throughput", "error", err)
		RespondError(w, apiErr, nil)
		return
	}
	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params", "error", err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -2559,12 +2563,12 @@ func (aH *APIHandler) getNetworkData(w http.ResponseWriter, r *http.Request) {

	queryRangeParams, err = kafka.BuildQRParamsWithCache(messagingQueue, "fetch-latency", attributeCache)
	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for fetch latency", "error", err)
		RespondError(w, apiErr, nil)
		return
	}
	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for fetch latency", "error", err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -2619,7 +2623,7 @@ func (aH *APIHandler) getProducerData(w http.ResponseWriter, r *http.Request) {
	messagingQueue, apiErr := ParseKafkaQueueBody(r)

	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -2633,13 +2637,13 @@ func (aH *APIHandler) getProducerData(w http.ResponseWriter, r *http.Request) {

	queryRangeParams, err := kafka.BuildQueryRangeParams(messagingQueue, "producer", kafkaSpanEval)
	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for producer", "error", err)
		RespondError(w, apiErr, nil)
		return
	}

	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for producer", "error", err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -2676,7 +2680,7 @@ func (aH *APIHandler) getConsumerData(w http.ResponseWriter, r *http.Request) {
	messagingQueue, apiErr := ParseKafkaQueueBody(r)

	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -2686,13 +2690,13 @@ func (aH *APIHandler) getConsumerData(w http.ResponseWriter, r *http.Request) {

	queryRangeParams, err := kafka.BuildQueryRangeParams(messagingQueue, "consumer", kafkaSpanEval)
	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for consumer", "error", err)
		RespondError(w, apiErr, nil)
		return
	}

	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for consumer", "error", err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -2734,7 +2738,7 @@ func (aH *APIHandler) getPartitionOverviewLatencyData(w http.ResponseWriter, r *
	messagingQueue, apiErr := ParseKafkaQueueBody(r)

	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -2744,13 +2748,13 @@ func (aH *APIHandler) getPartitionOverviewLatencyData(w http.ResponseWriter, r *

	queryRangeParams, err := kafka.BuildQueryRangeParams(messagingQueue, "producer-topic-throughput", kafkaSpanEval)
	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for producer topic throughput", "error", err)
		RespondError(w, apiErr, nil)
		return
	}

	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for producer topic throughput", "error", err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -2792,7 +2796,7 @@ func (aH *APIHandler) getConsumerPartitionLatencyData(w http.ResponseWriter, r *
	messagingQueue, apiErr := ParseKafkaQueueBody(r)

	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -2802,13 +2806,13 @@ func (aH *APIHandler) getConsumerPartitionLatencyData(w http.ResponseWriter, r *

	queryRangeParams, err := kafka.BuildQueryRangeParams(messagingQueue, "consumer_partition_latency", kafkaSpanEval)
	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for consumer partition latency", "error", err)
		RespondError(w, apiErr, nil)
		return
	}

	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for consumer partition latency", "error", err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -2852,7 +2856,7 @@ func (aH *APIHandler) getProducerThroughputOverview(w http.ResponseWriter, r *ht

	messagingQueue, apiErr := ParseKafkaQueueBody(r)
	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -2863,13 +2867,13 @@ func (aH *APIHandler) getProducerThroughputOverview(w http.ResponseWriter, r *ht

	producerQueryRangeParams, err := kafka.BuildQRParamsWithCache(messagingQueue, "producer-throughput-overview", attributeCache)
	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for producer throughput overview", "error", err)
		RespondError(w, apiErr, nil)
		return
	}

	if err := validateQueryRangeParamsV3(producerQueryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for producer throughput overview", "error", err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -2905,12 +2909,12 @@ func (aH *APIHandler) getProducerThroughputOverview(w http.ResponseWriter, r *ht

	queryRangeParams, err := kafka.BuildQRParamsWithCache(messagingQueue, "producer-throughput-overview-byte-rate", attributeCache)
	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for producer throughput byte rate", "error", err)
		RespondError(w, apiErr, nil)
		return
	}
	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for producer throughput byte rate", "error", err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -2968,7 +2972,7 @@ func (aH *APIHandler) getProducerThroughputDetails(w http.ResponseWriter, r *htt
	messagingQueue, apiErr := ParseKafkaQueueBody(r)

	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -2978,13 +2982,13 @@ func (aH *APIHandler) getProducerThroughputDetails(w http.ResponseWriter, r *htt

	queryRangeParams, err := kafka.BuildQueryRangeParams(messagingQueue, "producer-throughput-details", kafkaSpanEval)
	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for producer throughput details", "error", err)
		RespondError(w, apiErr, nil)
		return
	}

	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for producer throughput details", "error", err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -3026,7 +3030,7 @@ func (aH *APIHandler) getConsumerThroughputOverview(w http.ResponseWriter, r *ht
	messagingQueue, apiErr := ParseKafkaQueueBody(r)

	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -3036,13 +3040,13 @@ func (aH *APIHandler) getConsumerThroughputOverview(w http.ResponseWriter, r *ht

	queryRangeParams, err := kafka.BuildQueryRangeParams(messagingQueue, "consumer-throughput-overview", kafkaSpanEval)
	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for consumer throughput overview", "error", err)
		RespondError(w, apiErr, nil)
		return
	}

	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for consumer throughput overview", "error", err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -3084,7 +3088,7 @@ func (aH *APIHandler) getConsumerThroughputDetails(w http.ResponseWriter, r *htt
	messagingQueue, apiErr := ParseKafkaQueueBody(r)

	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -3094,13 +3098,13 @@ func (aH *APIHandler) getConsumerThroughputDetails(w http.ResponseWriter, r *htt

	queryRangeParams, err := kafka.BuildQueryRangeParams(messagingQueue, "consumer-throughput-details", kafkaSpanEval)
	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for consumer throughput details", "error", err)
		RespondError(w, apiErr, nil)
		return
	}

	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for consumer throughput details", "error", err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -3145,7 +3149,7 @@ func (aH *APIHandler) getProducerConsumerEval(w http.ResponseWriter, r *http.Req
	messagingQueue, apiErr := ParseKafkaQueueBody(r)

	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -3155,7 +3159,7 @@ func (aH *APIHandler) getProducerConsumerEval(w http.ResponseWriter, r *http.Req

	queryRangeParams, err := kafka.BuildQueryRangeParams(messagingQueue, "producer-consumer-eval", kafkaSpanEval)
	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for producer consumer eval", "error", err)
		RespondError(w, &model.ApiError{
			Typ: model.ErrorBadData,
			Err: err,
@@ -3164,7 +3168,7 @@ func (aH *APIHandler) getProducerConsumerEval(w http.ResponseWriter, r *http.Req
	}

	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for producer consumer eval", "error", err)
		RespondError(w, apiErr, nil)
		return
	}
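
Every messaging-queue handler in the hunks above repeats the same shape: parse the body, build query-range params, validate them, and log-and-respond on each failure. If one wanted to factor that repetition out, a hypothetical helper could look like the sketch below (the helper name, signatures, and stub types are invented for illustration; the diff itself keeps the handlers inline):

```go
package api

import (
	"log/slog"
	"net/http"
)

type queryRangeParams struct{ Query string }

// validateQueryRangeParams stands in for the real validateQueryRangeParamsV3.
func validateQueryRangeParams(p *queryRangeParams) error { return nil }

type apiHandler struct{ logger *slog.Logger }

// buildAndValidate factors out the build -> validate -> log-and-respond
// sequence; it returns false when an error response has already been written.
func (h *apiHandler) buildAndValidate(w http.ResponseWriter, r *http.Request,
	build func() (*queryRangeParams, error)) (*queryRangeParams, bool) {
	params, err := build()
	if err != nil {
		h.logger.ErrorContext(r.Context(), "failed to build query range params", "error", err)
		http.Error(w, err.Error(), http.StatusBadRequest)
		return nil, false
	}
	if err := validateQueryRangeParams(params); err != nil {
		h.logger.ErrorContext(r.Context(), "failed to validate query range params", "error", err)
		http.Error(w, err.Error(), http.StatusBadRequest)
		return nil, false
	}
	return params, true
}
```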
@@ -4252,7 +4256,7 @@ func (aH *APIHandler) CreateLogsPipeline(w http.ResponseWriter, r *http.Request)
	postable []pipelinetypes.PostablePipeline,
) (*logparsingpipeline.PipelinesResponse, error) {
	if len(postable) == 0 {
-		zap.L().Warn("found no pipelines in the http request, this will delete all the pipelines")
+		aH.logger.WarnContext(r.Context(), "found no pipelines in the http request, this will delete all the pipelines")
	}

	err := aH.LogsParsingPipelineController.ValidatePipelines(ctx, postable)
@@ -4450,7 +4454,7 @@ func (aH *APIHandler) QueryRangeV3Format(w http.ResponseWriter, r *http.Request)
	queryRangeParams, apiErrorObj := ParseQueryRangeParams(r)

	if apiErrorObj != nil {
-		zap.L().Error(apiErrorObj.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "error parsing query range params", "error", apiErrorObj.Err)
		RespondError(w, apiErrorObj, nil)
		return
	}
@@ -4512,13 +4516,13 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
	// check if traceID is used as filter (with equal/similar operator) in traces query if yes add timestamp filter to queryRange params
	isUsed, traceIDs := tracesV3.TraceIdFilterUsedWithEqual(queryRangeParams)
	if isUsed && len(traceIDs) > 0 {
-		zap.L().Debug("traceID used as filter in traces query")
+		aH.logger.DebugContext(ctx, "trace_id used as filter in traces query")
		// query signoz_spans table with traceID to get min and max timestamp
		min, max, err := aH.reader.GetMinAndMaxTimestampForTraceID(ctx, traceIDs)
		if err == nil {
			// add timestamp filter to queryRange params
			tracesV3.AddTimestampFilters(min, max, queryRangeParams)
-			zap.L().Debug("post adding timestamp filter in traces query", zap.Any("queryRangeParams", queryRangeParams))
+			aH.logger.DebugContext(ctx, "post adding timestamp filter in traces query", "query_range_params", queryRangeParams)
		}
	}
}
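
Two conventions ride along with the logger swap in this hunk: message and key names move to snake_case (`traceID` becomes `trace_id`, `queryRangeParams` becomes `query_range_params`), and zap's typed fields (`zap.Any(...)`) become alternating key-value arguments. A runnable sketch of the after-state, assuming Go's standard `log/slog` (the params value here is an illustrative stand-in):

```go
package main

import (
	"context"
	"log/slog"
	"os"
)

func main() {
	// Debug-level records are dropped by default, so the level is lowered here.
	logger := slog.New(slog.NewTextHandler(os.Stdout,
		&slog.HandlerOptions{Level: slog.LevelDebug}))
	ctx := context.Background()

	// Illustrative stand-in for the diff's queryRangeParams value.
	queryRangeParams := map[string]any{"start": 1_700_000_000, "end": 1_700_000_600}

	// zap.Any("queryRangeParams", v) becomes a plain key-value pair,
	// with the key renamed to snake_case.
	logger.DebugContext(ctx, "post adding timestamp filter in traces query",
		"query_range_params", queryRangeParams)
}
```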
@@ -4529,9 +4533,8 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
	onQueryFinished, apiErr := aH.reader.ReportQueryStartForProgressTracking(queryIdHeader)

	if apiErr != nil {
-		zap.L().Error(
-			"couldn't report query start for progress tracking",
-			zap.String("queryId", queryIdHeader), zap.Error(apiErr),
+		aH.logger.ErrorContext(ctx, "failed to report query start for progress tracking",
+			"query_id", queryIdHeader, "error", apiErr,
		)

	} else {
@@ -4706,7 +4709,7 @@ func (aH *APIHandler) QueryRangeV3(w http.ResponseWriter, r *http.Request) {
	queryRangeParams, apiErrorObj := ParseQueryRangeParams(r)

	if apiErrorObj != nil {
-		zap.L().Error("error parsing metric query range params", zap.Error(apiErrorObj.Err))
+		aH.logger.ErrorContext(r.Context(), "error parsing metric query range params", "error", apiErrorObj.Err)
		RespondError(w, apiErrorObj, nil)
		return
	}
@@ -4714,7 +4717,7 @@ func (aH *APIHandler) QueryRangeV3(w http.ResponseWriter, r *http.Request) {
	// add temporality for each metric
	temporalityErr := aH.PopulateTemporality(r.Context(), orgID, queryRangeParams)
	if temporalityErr != nil {
-		zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr))
+		aH.logger.ErrorContext(r.Context(), "error adding temporality for metrics", "error", temporalityErr)
		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
		return
	}
@@ -4758,9 +4761,8 @@ func (aH *APIHandler) GetQueryProgressUpdates(w http.ResponseWriter, r *http.Req
	progressCh, unsubscribe, apiErr := aH.reader.SubscribeToQueryProgress(queryId)
	if apiErr != nil {
		// Shouldn't happen unless query progress requested after query finished
-		zap.L().Warn(
-			"couldn't subscribe to query progress",
-			zap.String("queryId", queryId), zap.Any("error", apiErr),
+		aH.logger.WarnContext(r.Context(), "failed to subscribe to query progress",
+			"query_id", queryId, "error", apiErr,
		)
		return
	}
@@ -4769,25 +4771,22 @@ func (aH *APIHandler) GetQueryProgressUpdates(w http.ResponseWriter, r *http.Req
	for queryProgress := range progressCh {
		msg, err := json.Marshal(queryProgress)
		if err != nil {
-			zap.L().Error(
-				"failed to serialize progress message",
-				zap.String("queryId", queryId), zap.Any("progress", queryProgress), zap.Error(err),
+			aH.logger.ErrorContext(r.Context(), "failed to serialize progress message",
+				"query_id", queryId, "progress", queryProgress, "error", err,
			)
			continue
		}

		err = c.WriteMessage(websocket.TextMessage, msg)
		if err != nil {
-			zap.L().Error(
-				"failed to write progress msg to websocket",
-				zap.String("queryId", queryId), zap.String("msg", string(msg)), zap.Error(err),
+			aH.logger.ErrorContext(r.Context(), "failed to write progress message to websocket",
+				"query_id", queryId, "msg", string(msg), "error", err,
			)
			break

		} else {
-			zap.L().Debug(
-				"wrote progress msg to websocket",
-				zap.String("queryId", queryId), zap.String("msg", string(msg)), zap.Error(err),
+			aH.logger.DebugContext(r.Context(), "wrote progress message to websocket",
+				"query_id", queryId, "msg", string(msg),
			)
		}
	}
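
The progress-streaming loop keeps its control flow (skip the update on a marshal failure, stop on a write failure, debug-log on success) while switching loggers; the migrated Debug call also drops the always-nil `zap.Error(err)` field the old version carried into the success branch. A simplified, runnable sketch of the same loop, with the write function standing in for the gorilla/websocket connection used in the diff:

```go
package main

import (
	"context"
	"encoding/json"
	"log/slog"
	"os"
)

type progress struct{ Percent int }

func streamProgress(ctx context.Context, logger *slog.Logger, queryID string,
	progressCh <-chan progress, write func([]byte) error) {
	for p := range progressCh {
		msg, err := json.Marshal(p)
		if err != nil {
			// A bad update is skipped; the stream itself stays alive.
			logger.ErrorContext(ctx, "failed to serialize progress message",
				"query_id", queryID, "progress", p, "error", err)
			continue
		}
		if err := write(msg); err != nil {
			// A failed write ends the stream: the client likely disconnected.
			logger.ErrorContext(ctx, "failed to write progress message to websocket",
				"query_id", queryID, "msg", string(msg), "error", err)
			break
		}
		logger.DebugContext(ctx, "wrote progress message to websocket",
			"query_id", queryID, "msg", string(msg))
	}
}

func main() {
	logger := slog.New(slog.NewTextHandler(os.Stderr,
		&slog.HandlerOptions{Level: slog.LevelDebug}))
	ch := make(chan progress, 1)
	ch <- progress{Percent: 50}
	close(ch)
	streamProgress(context.Background(), logger, "query-1", ch,
		func(b []byte) error { return nil })
}
```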
@@ -4871,13 +4870,13 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
	// check if traceID is used as filter (with equal/similar operator) in traces query if yes add timestamp filter to queryRange params
	isUsed, traceIDs := tracesV3.TraceIdFilterUsedWithEqual(queryRangeParams)
	if isUsed && len(traceIDs) > 0 {
-		zap.L().Debug("traceID used as filter in traces query")
+		aH.logger.DebugContext(ctx, "trace_id used as filter in traces query")
		// query signoz_spans table with traceID to get min and max timestamp
		min, max, err := aH.reader.GetMinAndMaxTimestampForTraceID(ctx, traceIDs)
		if err == nil {
			// add timestamp filter to queryRange params
			tracesV3.AddTimestampFilters(min, max, queryRangeParams)
-			zap.L().Debug("post adding timestamp filter in traces query", zap.Any("queryRangeParams", queryRangeParams))
+			aH.logger.DebugContext(ctx, "post adding timestamp filter in traces query", "query_range_params", queryRangeParams)
		}
	}
}
@@ -4929,7 +4928,7 @@ func (aH *APIHandler) QueryRangeV4(w http.ResponseWriter, r *http.Request) {
	queryRangeParams, apiErrorObj := ParseQueryRangeParams(r)

	if apiErrorObj != nil {
-		zap.L().Error("error parsing metric query range params", zap.Error(apiErrorObj.Err))
+		aH.logger.ErrorContext(r.Context(), "error parsing metric query range params", "error", apiErrorObj.Err)
		RespondError(w, apiErrorObj, nil)
		return
	}
@@ -4938,7 +4937,7 @@ func (aH *APIHandler) QueryRangeV4(w http.ResponseWriter, r *http.Request) {
	// add temporality for each metric
	temporalityErr := aH.PopulateTemporality(r.Context(), orgID, queryRangeParams)
	if temporalityErr != nil {
-		zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr))
+		aH.logger.ErrorContext(r.Context(), "error adding temporality for metrics", "error", temporalityErr)
		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
		return
	}
@@ -4987,7 +4986,7 @@ func (aH *APIHandler) getQueueOverview(w http.ResponseWriter, r *http.Request) {
	queueListRequest, apiErr := ParseQueueBody(r)

	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse queue body", "error", apiErr.Err)
		RespondError(w, apiErr, nil)
		return
	}
@@ -4995,7 +4994,7 @@ func (aH *APIHandler) getQueueOverview(w http.ResponseWriter, r *http.Request) {
	chq, err := queues2.BuildOverviewQuery(queueListRequest)

	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build queue overview query", "error", err)
		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", err)}, nil)
		return
	}
@@ -5026,7 +5025,7 @@ func (aH *APIHandler) getDomainList(w http.ResponseWriter, r *http.Request) {
	// Parse the request body to get third-party query parameters
	thirdPartyQueryRequest, apiErr := ParseRequestBody(r)
	if apiErr != nil {
-		zap.L().Error("Failed to parse request body", zap.Error(apiErr))
+		aH.logger.ErrorContext(r.Context(), "failed to parse request body", "error", apiErr)
		render.Error(w, errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, apiErr.Error()))
		return
	}
@@ -5034,7 +5033,7 @@ func (aH *APIHandler) getDomainList(w http.ResponseWriter, r *http.Request) {
	// Build the v5 query range request for domain listing
	queryRangeRequest, err := thirdpartyapi.BuildDomainList(thirdPartyQueryRequest)
	if err != nil {
-		zap.L().Error("Failed to build domain list query", zap.Error(err))
+		aH.logger.ErrorContext(r.Context(), "failed to build domain list query", "error", err)
		apiErrObj := errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error())
		render.Error(w, apiErrObj)
		return
@@ -5047,7 +5046,7 @@ func (aH *APIHandler) getDomainList(w http.ResponseWriter, r *http.Request) {
	// Execute the query using the v5 querier
	result, err := aH.Signoz.Querier.QueryRange(ctx, orgID, queryRangeRequest)
	if err != nil {
-		zap.L().Error("Query execution failed", zap.Error(err))
+		aH.logger.ErrorContext(r.Context(), "query execution failed", "error", err)
		apiErrObj := errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error())
		render.Error(w, apiErrObj)
		return
@@ -5086,7 +5085,7 @@ func (aH *APIHandler) getDomainInfo(w http.ResponseWriter, r *http.Request) {
	// Parse the request body to get third-party query parameters
	thirdPartyQueryRequest, apiErr := ParseRequestBody(r)
	if apiErr != nil {
-		zap.L().Error("Failed to parse request body", zap.Error(apiErr))
+		aH.logger.ErrorContext(r.Context(), "failed to parse request body", "error", apiErr)
		render.Error(w, errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, apiErr.Error()))
		return
	}
@@ -5094,7 +5093,7 @@ func (aH *APIHandler) getDomainInfo(w http.ResponseWriter, r *http.Request) {
	// Build the v5 query range request for domain info
	queryRangeRequest, err := thirdpartyapi.BuildDomainInfo(thirdPartyQueryRequest)
	if err != nil {
-		zap.L().Error("Failed to build domain info query", zap.Error(err))
+		aH.logger.ErrorContext(r.Context(), "failed to build domain info query", "error", err)
		apiErrObj := errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error())
		render.Error(w, apiErrObj)
		return
@@ -5107,7 +5106,7 @@ func (aH *APIHandler) getDomainInfo(w http.ResponseWriter, r *http.Request) {
	// Execute the query using the v5 querier
	result, err := aH.Signoz.Querier.QueryRange(ctx, orgID, queryRangeRequest)
	if err != nil {
-		zap.L().Error("Query execution failed", zap.Error(err))
+		aH.logger.ErrorContext(r.Context(), "query execution failed", "error", err)
		apiErrObj := errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error())
		render.Error(w, apiErrObj)
		return
Some files were not shown because too many files have changed in this diff.
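
The closing getDomainList/getDomainInfo hunks pair the logger migration with the newer `render.Error`/`errorsV2` response helpers rather than `RespondError`. A sketch of that shape with the SigNoz-specific pieces stubbed out (only the call pattern matches the diff; the stub types and implementations are invented):

```go
package api

import (
	"encoding/json"
	"log/slog"
	"net/http"
)

// apiError and invalidInput stand in for errorsV2.New(errorsV2.TypeInvalidInput, ...).
type apiError struct{ Type, Code, Message string }

func invalidInput(msg string) apiError {
	return apiError{Type: "invalid_input", Code: "invalid_input", Message: msg}
}

// renderError stands in for render.Error.
func renderError(w http.ResponseWriter, e apiError) {
	w.WriteHeader(http.StatusBadRequest)
	_ = json.NewEncoder(w).Encode(e)
}

type handler struct{ logger *slog.Logger }

func (h *handler) getDomainList(w http.ResponseWriter, r *http.Request) {
	var req map[string]any
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		// Log with the request context, then respond through the render
		// helper, mirroring the final hunks of the diff.
		h.logger.ErrorContext(r.Context(), "failed to parse request body", "error", err)
		renderError(w, invalidInput(err.Error()))
		return
	}
	w.WriteHeader(http.StatusOK)
}
```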