Compare commits

...

33 Commits

Author SHA1 Message Date
swapnil-signoz
0369842f3d refactor: clean up 2026-03-17 23:40:14 +05:30
swapnil-signoz
59cd96562a Merge branch 'refactor/cloud-integration-types' into refactor/cloud-integration-impl-store 2026-03-17 23:10:54 +05:30
swapnil-signoz
cc4475cab7 refactor: updating store methods 2026-03-17 23:10:15 +05:30
swapnil-signoz
ac8c648420 Merge branch 'refactor/cloud-integration-types' into refactor/cloud-integration-impl-store 2026-03-17 21:09:47 +05:30
swapnil-signoz
bede6be4b8 feat: adding method for service id creation 2026-03-17 21:09:26 +05:30
swapnil-signoz
dd3d60e6df Merge branch 'refactor/cloud-integration-types' into refactor/cloud-integration-impl-store 2026-03-17 20:49:31 +05:30
swapnil-signoz
538ab686d2 refactor: using serviceID type 2026-03-17 20:49:17 +05:30
swapnil-signoz
936a325cb9 Merge branch 'refactor/cloud-integration-types' into refactor/cloud-integration-impl-store 2026-03-17 17:25:58 +05:30
swapnil-signoz
c6cdcd0143 refactor: renaming service type to service id 2026-03-17 17:25:29 +05:30
swapnil-signoz
cd9211d718 refactor: clean up types 2026-03-17 17:04:27 +05:30
swapnil-signoz
0601c28782 feat: adding integration test 2026-03-17 11:02:46 +05:30
swapnil-signoz
580610dbfa Merge branch 'main' into refactor/cloud-integration-impl-store 2026-03-16 23:02:19 +05:30
Nageshbansal
7371dcacf0 chore: deprecates generator from deploy (#10447) 2026-03-16 14:55:33 +00:00
Abhi kumar
3cdf3e06f3 feat: added chart appearance settings in panel (#10573)
* feat: added section in panel settings

* chore: minor changes

* fix: fixed failing tests

* fix: minor style fixes

* chore: updated the categorisation

* feat: added chart appearance settings in panel

* feat: added fill mode in timeseries

* chore: updated styles + made panel config resizable

* chore: updated styles

* chore: minor styles improvements

* chore: formatting unit section fix

* chore: disabled chart apperance section

* chore: prettier fmt fix

* fix: transform react-resizable-panels in jest config

* fix: failing test

* chore: updated transition timing

* chore: fixed resizable handle styling

* chore: pr review changes

* chore: pr review changes
2026-03-16 14:08:15 +00:00
swapnil-signoz
2d2aa02a81 refactor: split upsert store method 2026-03-16 18:27:42 +05:30
swapnil-signoz
dd9723ad13 Merge branch 'refactor/cloud-integration-types' into refactor/cloud-integration-impl-store 2026-03-16 17:42:03 +05:30
swapnil-signoz
3651469416 Merge branch 'main' of https://github.com/SigNoz/signoz into refactor/cloud-integration-types 2026-03-16 17:41:52 +05:30
swapnil-signoz
febce75734 refactor: update Dashboard struct comments and remove unused fields 2026-03-16 17:41:28 +05:30
Pandey
f8c38df2bf refactor: replace zap logger with slog across codebase (#10599)
* refactor: replace zap logger with slog across codebase

* refactor: fix lint

* refactor: fix lint
2026-03-16 12:09:39 +00:00
swapnil-signoz
e1616f3487 Merge branch 'refactor/cloud-integration-types' into refactor/cloud-integration-impl-store 2026-03-16 17:36:15 +05:30
Pandey
cab4a56694 chore: add myself as codeowner for CI and go.mod (#10597)
Clarified CODEOWNERS comments and updated owner assignments.
2026-03-16 10:01:36 +00:00
Ashwin Bhatkal
78041fe457 chore: send slack notification on dequeue only and not merge (#10596) 2026-03-16 09:38:04 +00:00
Ashwin Bhatkal
09b6382820 chore: separate dashboard slider from dashboard provider + refactor (#10572)
* chore: separate dashboard slider from dashboard provider + refactor

* chore: resolve self comments
2026-03-16 08:12:09 +00:00
Ashwin Bhatkal
9689b847f0 chore: add slack notification on dequeue from merge queue (#10580)
* chore: add slack notification on merge queue failure

* chore: break type

* chore: update yaml

* chore: update yaml

* chore: update yaml

* chore: update yaml

* chore: update yaml

* chore: update yaml

* chore: update yaml

* chore: resolve comments
2026-03-16 07:12:19 +00:00
Vishal Sharma
15e5938e95 fix: add allInOneLightMode SVG for light mode (#10589) 2026-03-16 06:59:28 +00:00
swapnil-signoz
11ed15f4c5 feat: implement cloud integration store 2026-03-14 17:05:02 +05:30
swapnil-signoz
f47877cca9 Merge branch 'refactor/cloud-integration-types' into refactor/cloud-integration-impl-store 2026-03-14 17:01:51 +05:30
swapnil-signoz
3111904223 Merge branch 'refactor/cloud-integration-types' into refactor/cloud-integration-impl-store 2026-03-14 16:36:35 +05:30
swapnil-signoz
a09dc325de Merge branch 'main' into refactor/cloud-integration-impl-store 2026-03-02 16:39:20 +05:30
swapnil-signoz
379b4f7fc4 refactor: removing interface check 2026-03-02 14:50:37 +05:30
swapnil-signoz
5e536ae077 Merge branch 'refactor/cloud-integration-types' into refactor/cloud-integration-impl-store 2026-03-02 14:49:35 +05:30
swapnil-signoz
2cc14f1ad4 Merge branch 'main' into refactor/cloud-integration-impl-store 2026-03-02 14:49:00 +05:30
swapnil-signoz
dc4ed4d239 feat: adding sql store implementation 2026-03-02 14:44:56 +05:30
120 changed files with 4098 additions and 1956 deletions

.github/CODEOWNERS (vendored): 10 changed lines
View File

@@ -1,8 +1,6 @@
 # CODEOWNERS info: https://help.github.com/en/articles/about-code-owners
-# Owners are automatically requested for review for PRs that changes code
-# that they own.
+# Owners are automatically requested for review for PRs that changes code that they own.
 /frontend/ @SigNoz/frontend-maintainers
@@ -11,8 +9,10 @@
 /frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json @makeavish
 /frontend/src/container/OnboardingV2Container/AddDataSource/AddDataSource.tsx @makeavish
-/deploy/ @SigNoz/devops
-.github @SigNoz/devops
+# CI
+/deploy/ @therealpandey
+.github @therealpandey
+go.mod @therealpandey
 # Scaffold Owners

.github/workflows/mergequeueci.yaml (vendored, new file): 60 changed lines
View File

@@ -0,0 +1,60 @@
name: mergequeueci
on:
pull_request:
types:
- dequeued
jobs:
notify:
runs-on: ubuntu-latest
if: github.event.pull_request.merged == false
steps:
- name: alert
uses: slackapi/slack-github-action@v2.1.1
with:
webhook: ${{ secrets.SLACK_MERGE_QUEUE_WEBHOOK }}
webhook-type: incoming-webhook
payload: |
{
"text": ":x: PR removed from merge queue",
"blocks": [
{
"type": "header",
"text": {
"type": "plain_text",
"text": ":x: PR Removed from Merge Queue"
}
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "*<${{ github.event.pull_request.html_url }}|PR #${{ github.event.pull_request.number }}: ${{ github.event.pull_request.title }}>*"
}
},
{
"type": "divider"
},
{
"type": "section",
"fields": [
{
"type": "mrkdwn",
"text": "*Author*\n@${{ github.event.pull_request.user.login }}"
}
]
}
]
}
- name: comment
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PR_NUMBER: ${{ github.event.pull_request.number }}
PR_AUTHOR: ${{ github.event.pull_request.user.login }}
PR_URL: ${{ github.event.pull_request.html_url }}
run: |
gh api repos/${{ github.repository }}/issues/$PR_NUMBER/comments \
-f body="> :x: **PR removed from merge queue**
>
> @$PR_AUTHOR your PR was removed from the merge queue. Fix the issue and re-queue when ready."

View File

@@ -6,7 +6,6 @@ import (
 "github.com/SigNoz/signoz/pkg/version"
 "github.com/spf13/cobra"
-"go.uber.org/zap" //nolint:depguard
 )
 var RootCmd = &cobra.Command{
@@ -19,12 +18,6 @@
 }
 func Execute(logger *slog.Logger) {
-zapLogger := newZapLogger()
-zap.ReplaceGlobals(zapLogger)
-defer func() {
-_ = zapLogger.Sync()
-}()
 err := RootCmd.Execute()
 if err != nil {
 logger.ErrorContext(RootCmd.Context(), "error running command", "error", err)

View File

@@ -1,110 +0,0 @@
package cmd
import (
"context"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"go.uber.org/zap" //nolint:depguard
"go.uber.org/zap/zapcore" //nolint:depguard
)
// Deprecated: Use `NewLogger` from `pkg/instrumentation` instead.
func newZapLogger() *zap.Logger {
config := zap.NewProductionConfig()
config.EncoderConfig.TimeKey = "timestamp"
config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder
// Extract sampling config before building the logger.
// We need to disable sampling in the config and apply it manually later
// to ensure correct core ordering. See filteringCore documentation for details.
samplerConfig := config.Sampling
config.Sampling = nil
logger, _ := config.Build()
// Wrap with custom core wrapping to filter certain log entries.
// The order of wrapping is important:
// 1. First wrap with filteringCore
// 2. Then wrap with sampler
//
// This creates the call chain: sampler -> filteringCore -> ioCore
//
// During logging:
// - sampler.Check decides whether to sample the log entry
// - If sampled, filteringCore.Check is called
// - filteringCore adds itself to CheckedEntry.cores
// - All cores in CheckedEntry.cores have their Write method called
// - filteringCore.Write can now filter the entry before passing to ioCore
//
// If we didn't disable the sampler above, filteringCore would have wrapped
// sampler. By calling sampler.Check we would have allowed it to call
// ioCore.Check that adds itself to CheckedEntry.cores. Then ioCore.Write
// would have bypassed our checks, making filtering impossible.
return logger.WithOptions(zap.WrapCore(func(core zapcore.Core) zapcore.Core {
core = &filteringCore{core}
if samplerConfig != nil {
core = zapcore.NewSamplerWithOptions(
core,
time.Second,
samplerConfig.Initial,
samplerConfig.Thereafter,
)
}
return core
}))
}
// filteringCore wraps a zapcore.Core to filter out log entries based on a
// custom logic.
//
// Note: This core must be positioned before the sampler in the core chain
// to ensure Write is called. See newZapLogger for ordering details.
type filteringCore struct {
zapcore.Core
}
// filter determines whether a log entry should be written based on its fields.
// Returns false if the entry should be suppressed, true otherwise.
//
// Current filters:
// - context.Canceled: These are expected errors from cancelled operations,
// and create noise in logs.
func (c *filteringCore) filter(fields []zapcore.Field) bool {
for _, field := range fields {
if field.Type == zapcore.ErrorType {
if loggedErr, ok := field.Interface.(error); ok {
// Suppress logs containing context.Canceled errors
if errors.Is(loggedErr, context.Canceled) {
return false
}
}
}
}
return true
}
// With implements zapcore.Core.With
// It returns a new copy with the added context.
func (c *filteringCore) With(fields []zapcore.Field) zapcore.Core {
return &filteringCore{c.Core.With(fields)}
}
// Check implements zapcore.Core.Check.
// It adds this core to the CheckedEntry if the log level is enabled,
// ensuring that Write will be called for this entry.
func (c *filteringCore) Check(ent zapcore.Entry, ce *zapcore.CheckedEntry) *zapcore.CheckedEntry {
if c.Enabled(ent.Level) {
return ce.AddCore(ent, c)
}
return ce
}
// Write implements zapcore.Core.Write.
// It filters log entries based on their fields before delegating to the wrapped core.
func (c *filteringCore) Write(ent zapcore.Entry, fields []zapcore.Field) error {
if !c.filter(fields) {
return nil
}
return c.Core.Write(ent, fields)
}
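The file removed above implemented context.Canceled suppression as a zap core wrapper. With the codebase moving to slog, the same suppression could be expressed as an slog.Handler wrapper; a minimal sketch under that assumption (the handler name, placement, and main function are illustrative only and not part of this diff):

package main

import (
	"context"
	"errors"
	"log/slog"
	"os"
)

// filteringHandler wraps another slog.Handler and drops records whose "error"
// attribute wraps context.Canceled, mirroring the removed zap filteringCore.
type filteringHandler struct {
	slog.Handler
}

func (h filteringHandler) Handle(ctx context.Context, r slog.Record) error {
	drop := false
	r.Attrs(func(a slog.Attr) bool {
		if err, ok := a.Value.Any().(error); ok && errors.Is(err, context.Canceled) {
			drop = true
			return false // stop iterating over attributes
		}
		return true
	})
	if drop {
		return nil
	}
	return h.Handler.Handle(ctx, r)
}

func (h filteringHandler) WithAttrs(attrs []slog.Attr) slog.Handler {
	return filteringHandler{h.Handler.WithAttrs(attrs)}
}

func (h filteringHandler) WithGroup(name string) slog.Handler {
	return filteringHandler{h.Handler.WithGroup(name)}
}

func main() {
	logger := slog.New(filteringHandler{slog.NewJSONHandler(os.Stdout, nil)})
	logger.Info("kept", "error", errors.New("boom"))
	logger.Info("dropped", "error", context.Canceled) // suppressed by the wrapper
}

Note the sketch only covers the error filtering; the sampling behaviour handled by the deleted zap code is not reproduced here.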

View File

@@ -1,38 +0,0 @@
version: "3"
x-common: &common
networks:
- signoz-net
extra_hosts:
- host.docker.internal:host-gateway
logging:
options:
max-size: 50m
max-file: "3"
deploy:
restart_policy:
condition: on-failure
services:
hotrod:
<<: *common
image: jaegertracing/example-hotrod:1.61.0
command: [ "all" ]
environment:
- OTEL_EXPORTER_OTLP_ENDPOINT=http://host.docker.internal:4318 #
load-hotrod:
<<: *common
image: "signoz/locust:1.2.3"
environment:
ATTACKED_HOST: http://hotrod:8080
LOCUST_MODE: standalone
NO_PROXY: standalone
TASK_DELAY_FROM: 5
TASK_DELAY_TO: 30
QUIET_MODE: "${QUIET_MODE:-false}"
LOCUST_OPTS: "--headless -u 10 -r 1"
volumes:
- ../../../common/locust-scripts:/locust
networks:
signoz-net:
name: signoz-net
external: true

View File

@@ -1,69 +0,0 @@
version: "3"
x-common: &common
networks:
- signoz-net
extra_hosts:
- host.docker.internal:host-gateway
logging:
options:
max-size: 50m
max-file: "3"
deploy:
mode: global
restart_policy:
condition: on-failure
services:
otel-agent:
<<: *common
image: otel/opentelemetry-collector-contrib:0.111.0
command:
- --config=/etc/otel-collector-config.yaml
volumes:
- ./otel-agent-config.yaml:/etc/otel-collector-config.yaml
- /:/hostfs:ro
environment:
- SIGNOZ_COLLECTOR_ENDPOINT=http://host.docker.internal:4317 # In case of external SigNoz or cloud, update the endpoint and access token
- OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}}
# - SIGNOZ_ACCESS_TOKEN="<your-access-token>"
# Before exposing the ports, make sure the ports are not used by other services
# ports:
# - "4317:4317"
# - "4318:4318"
otel-metrics:
<<: *common
image: otel/opentelemetry-collector-contrib:0.111.0
user: 0:0 # If you have security concerns, you can replace this with your `UID:GID` that has necessary permissions to docker.sock
command:
- --config=/etc/otel-collector-config.yaml
volumes:
- ./otel-metrics-config.yaml:/etc/otel-collector-config.yaml
- /var/run/docker.sock:/var/run/docker.sock
environment:
- SIGNOZ_COLLECTOR_ENDPOINT=http://host.docker.internal:4317 # In case of external SigNoz or cloud, update the endpoint and access token
- OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}}
# - SIGNOZ_ACCESS_TOKEN="<your-access-token>"
# Before exposing the ports, make sure the ports are not used by other services
# ports:
# - "4317:4317"
# - "4318:4318"
deploy:
mode: replicated
replicas: 1
placement:
constraints:
- node.role == manager
logspout:
<<: *common
image: "gliderlabs/logspout:v3.2.14"
command: syslog+tcp://otel-agent:2255
user: root
volumes:
- /etc/hostname:/etc/host_hostname:ro
- /var/run/docker.sock:/var/run/docker.sock
depends_on:
- otel-agent
networks:
signoz-net:
name: signoz-net
external: true

View File

@@ -1,102 +0,0 @@
receivers:
hostmetrics:
collection_interval: 30s
root_path: /hostfs
scrapers:
cpu: {}
load: {}
memory: {}
disk: {}
filesystem: {}
network: {}
otlp:
protocols:
grpc:
endpoint: 0.0.0.0:4317
http:
endpoint: 0.0.0.0:4318
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-agent
static_configs:
- targets:
- localhost:8888
labels:
job_name: otel-agent
tcplog/docker:
listen_address: "0.0.0.0:2255"
operators:
- type: regex_parser
regex: '^<([0-9]+)>[0-9]+ (?P<timestamp>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?) (?P<container_id>\S+) (?P<container_name>\S+) [0-9]+ - -( (?P<body>.*))?'
timestamp:
parse_from: attributes.timestamp
layout: '%Y-%m-%dT%H:%M:%S.%LZ'
- type: move
from: attributes["body"]
to: body
- type: remove
field: attributes.timestamp
# please remove names from below if you want to collect logs from them
- type: filter
id: signoz_logs_filter
expr: 'attributes.container_name matches "^(signoz_(logspout|signoz|otel-collector|clickhouse|zookeeper))|(infra_(logspout|otel-agent|otel-metrics)).*"'
processors:
batch:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
resourcedetection:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors:
# - ec2
# - gcp
# - azure
- env
- system
timeout: 2s
extensions:
health_check:
endpoint: 0.0.0.0:13133
pprof:
endpoint: 0.0.0.0:1777
exporters:
otlp:
endpoint: ${env:SIGNOZ_COLLECTOR_ENDPOINT}
tls:
insecure: true
headers:
signoz-access-token: ${env:SIGNOZ_ACCESS_TOKEN}
# debug: {}
service:
telemetry:
logs:
encoding: json
metrics:
address: 0.0.0.0:8888
extensions:
- health_check
- pprof
pipelines:
traces:
receivers: [otlp]
processors: [resourcedetection, batch]
exporters: [otlp]
metrics:
receivers: [otlp]
processors: [resourcedetection, batch]
exporters: [otlp]
metrics/hostmetrics:
receivers: [hostmetrics]
processors: [resourcedetection, batch]
exporters: [otlp]
metrics/prometheus:
receivers: [prometheus]
processors: [resourcedetection, batch]
exporters: [otlp]
logs:
receivers: [otlp, tcplog/docker]
processors: [resourcedetection, batch]
exporters: [otlp]

View File

@@ -1,103 +0,0 @@
receivers:
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-metrics
static_configs:
- targets:
- localhost:8888
labels:
job_name: otel-metrics
# For Docker daemon metrics to be scraped, it must be configured to expose
# Prometheus metrics, as documented here: https://docs.docker.com/config/daemon/prometheus/
# - job_name: docker-daemon
# dockerswarm_sd_configs:
# - host: unix:///var/run/docker.sock
# role: nodes
# relabel_configs:
# - source_labels: [__meta_dockerswarm_node_address]
# target_label: __address__
# replacement: $1:9323
- job_name: "dockerswarm"
dockerswarm_sd_configs:
- host: unix:///var/run/docker.sock
role: tasks
relabel_configs:
- action: keep
regex: running
source_labels:
- __meta_dockerswarm_task_desired_state
- action: keep
regex: true
source_labels:
- __meta_dockerswarm_service_label_signoz_io_scrape
- regex: ([^:]+)(?::\d+)?
replacement: $1
source_labels:
- __address__
target_label: swarm_container_ip
- separator: .
source_labels:
- __meta_dockerswarm_service_name
- __meta_dockerswarm_task_slot
- __meta_dockerswarm_task_id
target_label: swarm_container_name
- target_label: __address__
source_labels:
- swarm_container_ip
- __meta_dockerswarm_service_label_signoz_io_port
separator: ":"
- source_labels:
- __meta_dockerswarm_service_label_signoz_io_path
target_label: __metrics_path__
- source_labels:
- __meta_dockerswarm_service_label_com_docker_stack_namespace
target_label: namespace
- source_labels:
- __meta_dockerswarm_service_name
target_label: service_name
- source_labels:
- __meta_dockerswarm_task_id
target_label: service_instance_id
- source_labels:
- __meta_dockerswarm_node_hostname
target_label: host_name
processors:
batch:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
resourcedetection:
detectors:
- env
- system
timeout: 2s
extensions:
health_check:
endpoint: 0.0.0.0:13133
pprof:
endpoint: 0.0.0.0:1777
exporters:
otlp:
endpoint: ${env:SIGNOZ_COLLECTOR_ENDPOINT}
tls:
insecure: true
headers:
signoz-access-token: ${env:SIGNOZ_ACCESS_TOKEN}
# debug: {}
service:
telemetry:
logs:
encoding: json
metrics:
address: 0.0.0.0:8888
extensions:
- health_check
- pprof
pipelines:
metrics:
receivers: [prometheus]
processors: [resourcedetection, batch]
exporters: [otlp]

View File

@@ -1,39 +0,0 @@
version: "3"
x-common: &common
networks:
- signoz-net
extra_hosts:
- host.docker.internal:host-gateway
logging:
options:
max-size: 50m
max-file: "3"
restart: unless-stopped
services:
hotrod:
<<: *common
image: jaegertracing/example-hotrod:1.61.0
container_name: hotrod
command: [ "all" ]
environment:
- OTEL_EXPORTER_OTLP_ENDPOINT=http://host.docker.internal:4318 # In case of external SigNoz or cloud, update the endpoint and access token
# - OTEL_OTLP_HEADERS=signoz-access-token=<your-access-token>
load-hotrod:
<<: *common
image: "signoz/locust:1.2.3"
container_name: load-hotrod
environment:
ATTACKED_HOST: http://hotrod:8080
LOCUST_MODE: standalone
NO_PROXY: standalone
TASK_DELAY_FROM: 5
TASK_DELAY_TO: 30
QUIET_MODE: "${QUIET_MODE:-false}"
LOCUST_OPTS: "--headless -u 10 -r 1"
volumes:
- ../../../common/locust-scripts:/locust
networks:
signoz-net:
name: signoz-net
external: true

View File

@@ -1,43 +0,0 @@
version: "3"
x-common: &common
networks:
- signoz-net
extra_hosts:
- host.docker.internal:host-gateway
logging:
options:
max-size: 50m
max-file: "3"
restart: unless-stopped
services:
otel-agent:
<<: *common
image: otel/opentelemetry-collector-contrib:0.111.0
command:
- --config=/etc/otel-collector-config.yaml
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
- /:/hostfs:ro
- /var/run/docker.sock:/var/run/docker.sock
environment:
- SIGNOZ_COLLECTOR_ENDPOINT=http://host.docker.internal:4317 # In case of external SigNoz or cloud, update the endpoint and access token
- OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux # Replace signoz-host with the actual hostname
# - SIGNOZ_ACCESS_TOKEN="<your-access-token>"
# Before exposing the ports, make sure the ports are not used by other services
# ports:
# - "4317:4317"
# - "4318:4318"
logspout:
<<: *common
image: "gliderlabs/logspout:v3.2.14"
volumes:
- /etc/hostname:/etc/host_hostname:ro
- /var/run/docker.sock:/var/run/docker.sock
command: syslog+tcp://otel-agent:2255
depends_on:
- otel-agent
networks:
signoz-net:
name: signoz-net
external: true

View File

@@ -1,139 +0,0 @@
receivers:
hostmetrics:
collection_interval: 30s
root_path: /hostfs
scrapers:
cpu: {}
load: {}
memory: {}
disk: {}
filesystem: {}
network: {}
otlp:
protocols:
grpc:
endpoint: 0.0.0.0:4317
http:
endpoint: 0.0.0.0:4318
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-collector
static_configs:
- targets:
- localhost:8888
labels:
job_name: otel-collector
# For Docker daemon metrics to be scraped, it must be configured to expose
# Prometheus metrics, as documented here: https://docs.docker.com/config/daemon/prometheus/
# - job_name: docker-daemon
# static_configs:
# - targets:
# - host.docker.internal:9323
# labels:
# job_name: docker-daemon
- job_name: docker-container
docker_sd_configs:
- host: unix:///var/run/docker.sock
relabel_configs:
- action: keep
regex: true
source_labels:
- __meta_docker_container_label_signoz_io_scrape
- regex: true
source_labels:
- __meta_docker_container_label_signoz_io_path
target_label: __metrics_path__
- regex: (.+)
source_labels:
- __meta_docker_container_label_signoz_io_path
target_label: __metrics_path__
- separator: ":"
source_labels:
- __meta_docker_network_ip
- __meta_docker_container_label_signoz_io_port
target_label: __address__
- regex: '/(.*)'
replacement: '$1'
source_labels:
- __meta_docker_container_name
target_label: container_name
- regex: __meta_docker_container_label_signoz_io_(.+)
action: labelmap
replacement: $1
tcplog/docker:
listen_address: "0.0.0.0:2255"
operators:
- type: regex_parser
regex: '^<([0-9]+)>[0-9]+ (?P<timestamp>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?) (?P<container_id>\S+) (?P<container_name>\S+) [0-9]+ - -( (?P<body>.*))?'
timestamp:
parse_from: attributes.timestamp
layout: '%Y-%m-%dT%H:%M:%S.%LZ'
- type: move
from: attributes["body"]
to: body
- type: remove
field: attributes.timestamp
# please remove names from below if you want to collect logs from them
- type: filter
id: signoz_logs_filter
expr: 'attributes.container_name matches "^signoz|(signoz-(|otel-collector|clickhouse|zookeeper))|(infra-(logspout|otel-agent)-.*)"'
processors:
batch:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
resourcedetection:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors:
# - ec2
# - gcp
# - azure
- env
- system
timeout: 2s
extensions:
health_check:
endpoint: 0.0.0.0:13133
pprof:
endpoint: 0.0.0.0:1777
exporters:
otlp:
endpoint: ${env:SIGNOZ_COLLECTOR_ENDPOINT}
tls:
insecure: true
headers:
signoz-access-token: ${env:SIGNOZ_ACCESS_TOKEN}
# debug: {}
service:
telemetry:
logs:
encoding: json
metrics:
address: 0.0.0.0:8888
extensions:
- health_check
- pprof
pipelines:
traces:
receivers: [otlp]
processors: [resourcedetection, batch]
exporters: [otlp]
metrics:
receivers: [otlp]
processors: [resourcedetection, batch]
exporters: [otlp]
metrics/hostmetrics:
receivers: [hostmetrics]
processors: [resourcedetection, batch]
exporters: [otlp]
metrics/prometheus:
receivers: [prometheus]
processors: [resourcedetection, batch]
exporters: [otlp]
logs:
receivers: [otlp, tcplog/docker]
processors: [resourcedetection, batch]
exporters: [otlp]

View File

@@ -2,6 +2,7 @@ package anomaly
 import (
 "context"
+"log/slog"
 "math"
 "time"
@@ -13,7 +14,6 @@ import (
 "github.com/SigNoz/signoz/pkg/types/ctxtypes"
 "github.com/SigNoz/signoz/pkg/types/instrumentationtypes"
 "github.com/SigNoz/signoz/pkg/valuer"
-"go.uber.org/zap"
 )
 var (
@@ -67,7 +67,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
 instrumentationtypes.CodeNamespace: "anomaly",
 instrumentationtypes.CodeFunctionName: "getResults",
 })
-zap.L().Info("fetching results for current period", zap.Any("currentPeriodQuery", params.CurrentPeriodQuery))
+slog.InfoContext(ctx, "fetching results for current period", "current_period_query", params.CurrentPeriodQuery)
 currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.CurrentPeriodQuery)
 if err != nil {
 return nil, err
@@ -78,7 +78,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
 return nil, err
 }
-zap.L().Info("fetching results for past period", zap.Any("pastPeriodQuery", params.PastPeriodQuery))
+slog.InfoContext(ctx, "fetching results for past period", "past_period_query", params.PastPeriodQuery)
 pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.PastPeriodQuery)
 if err != nil {
 return nil, err
@@ -89,7 +89,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
 return nil, err
 }
-zap.L().Info("fetching results for current season", zap.Any("currentSeasonQuery", params.CurrentSeasonQuery))
+slog.InfoContext(ctx, "fetching results for current season", "current_season_query", params.CurrentSeasonQuery)
 currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.CurrentSeasonQuery)
 if err != nil {
 return nil, err
@@ -100,7 +100,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
 return nil, err
 }
-zap.L().Info("fetching results for past season", zap.Any("pastSeasonQuery", params.PastSeasonQuery))
+slog.InfoContext(ctx, "fetching results for past season", "past_season_query", params.PastSeasonQuery)
 pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.PastSeasonQuery)
 if err != nil {
 return nil, err
@@ -111,7 +111,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
 return nil, err
 }
-zap.L().Info("fetching results for past 2 season", zap.Any("past2SeasonQuery", params.Past2SeasonQuery))
+slog.InfoContext(ctx, "fetching results for past 2 season", "past_2_season_query", params.Past2SeasonQuery)
 past2SeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.Past2SeasonQuery)
 if err != nil {
 return nil, err
@@ -122,7 +122,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
 return nil, err
 }
-zap.L().Info("fetching results for past 3 season", zap.Any("past3SeasonQuery", params.Past3SeasonQuery))
+slog.InfoContext(ctx, "fetching results for past 3 season", "past_3_season_query", params.Past3SeasonQuery)
 past3SeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.Past3SeasonQuery)
 if err != nil {
 return nil, err
@@ -235,17 +235,17 @@ func (p *BaseSeasonalProvider) getPredictedSeries(
 if predictedValue < 0 {
 // this should not happen (except when the data has extreme outliers)
 // we will use the moving avg of the previous period series in this case
-zap.L().Warn("predictedValue is less than 0", zap.Float64("predictedValue", predictedValue), zap.Any("labels", series.Labels))
+slog.Warn("predicted value is less than 0", "predicted_value", predictedValue, "labels", series.Labels)
 predictedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
 }
-zap.L().Debug("predictedSeries",
-zap.Float64("movingAvg", movingAvg),
-zap.Float64("avg", avg),
-zap.Float64("mean", mean),
-zap.Any("labels", series.Labels),
-zap.Float64("predictedValue", predictedValue),
-zap.Float64("curr", curr.Value),
+slog.Debug("predicted series",
+"moving_avg", movingAvg,
+"avg", avg,
+"mean", mean,
+"labels", series.Labels,
+"predicted_value", predictedValue,
+"curr", curr.Value,
 )
 predictedSeries.Points = append(predictedSeries.Points, v3.Point{
 Timestamp: curr.Timestamp,
@@ -418,7 +418,7 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
 for _, series := range result.Series {
 stdDev := p.getStdDev(series)
-zap.L().Info("stdDev", zap.Float64("stdDev", stdDev), zap.Any("labels", series.Labels))
+slog.InfoContext(ctx, "computed standard deviation", "std_dev", stdDev, "labels", series.Labels)
 pastPeriodSeries := p.getMatchingSeries(pastPeriodResult, series)
 currentSeasonSeries := p.getMatchingSeries(currentSeasonResult, series)
@@ -431,7 +431,7 @@
 pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
 past2SeasonSeriesAvg := p.getAvg(past2SeasonSeries)
 past3SeasonSeriesAvg := p.getAvg(past3SeasonSeries)
-zap.L().Info("getAvg", zap.Float64("prevSeriesAvg", prevSeriesAvg), zap.Float64("currentSeasonSeriesAvg", currentSeasonSeriesAvg), zap.Float64("pastSeasonSeriesAvg", pastSeasonSeriesAvg), zap.Float64("past2SeasonSeriesAvg", past2SeasonSeriesAvg), zap.Float64("past3SeasonSeriesAvg", past3SeasonSeriesAvg), zap.Any("labels", series.Labels))
+slog.InfoContext(ctx, "computed averages", "prev_series_avg", prevSeriesAvg, "current_season_series_avg", currentSeasonSeriesAvg, "past_season_series_avg", pastSeasonSeriesAvg, "past_2_season_series_avg", past2SeasonSeriesAvg, "past_3_season_series_avg", past3SeasonSeriesAvg, "labels", series.Labels)
 predictedSeries := p.getPredictedSeries(
 series,
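The hunks above and the remaining backend diffs below all follow the same logging migration: the global zap logger is replaced by context-aware slog calls with snake_case attribute keys. A condensed before/after sketch of the pattern (illustrative only; the function name and query value are made up, not taken from this diff):

package main

import (
	"context"
	"log/slog"
)

// fetchResults illustrates the logging migration pattern used throughout this PR.
func fetchResults(ctx context.Context, query string) {
	// Before: global zap logger with typed, camelCase fields.
	//   zap.L().Info("fetching results", zap.String("currentPeriodQuery", query))

	// After: context-aware slog call with alternating key/value pairs and snake_case keys.
	slog.InfoContext(ctx, "fetching results", "current_period_query", query)
}

func main() {
	fetchResults(context.Background(), "sum(rate(http_requests_total[5m]))")
}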

View File

@@ -18,7 +18,7 @@ import (
 "github.com/SigNoz/signoz/pkg/types/authtypes"
 "github.com/SigNoz/signoz/pkg/valuer"
 "github.com/gorilla/mux"
-"go.uber.org/zap"
+"log/slog"
 )
 type CloudIntegrationConnectionParamsResponse struct {
@@ -71,7 +71,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
 // Return the API Key (PAT) even if the rest of the params can not be deduced.
 // Params not returned from here will be requested from the user via form inputs.
 // This enables gracefully degraded but working experience even for non-cloud deployments.
-zap.L().Info("ingestion params and signoz api url can not be deduced since no license was found")
+slog.InfoContext(r.Context(), "ingestion params and signoz api url can not be deduced since no license was found")
 ah.Respond(w, result)
 return
 }
@@ -103,7 +103,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
 result.IngestionKey = ingestionKey
 } else {
-zap.L().Info("ingestion key can't be deduced since no gateway url has been configured")
+slog.InfoContext(r.Context(), "ingestion key can't be deduced since no gateway url has been configured")
 }
 ah.Respond(w, result)
@@ -138,9 +138,8 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
 }
 }
-zap.L().Info(
-"no PAT found for cloud integration, creating a new one",
-zap.String("cloudProvider", cloudProvider),
+slog.InfoContext(ctx, "no PAT found for cloud integration, creating a new one",
+"cloud_provider", cloudProvider,
 )
 newPAT, err := types.NewStorableAPIKey(
@@ -287,9 +286,8 @@ func getOrCreateCloudProviderIngestionKey(
 }
 }
-zap.L().Info(
-"no existing ingestion key found for cloud integration, creating a new one",
-zap.String("cloudProvider", cloudProvider),
+slog.InfoContext(ctx, "no existing ingestion key found for cloud integration, creating a new one",
+"cloud_provider", cloudProvider,
 )
 createKeyResult, apiErr := requestGateway[createIngestionKeyResponse](
 ctx, gatewayUrl, licenseKey, "/v1/workspaces/me/keys",

View File

@@ -15,7 +15,7 @@ import (
 "github.com/SigNoz/signoz/pkg/types/featuretypes"
 "github.com/SigNoz/signoz/pkg/types/licensetypes"
 "github.com/SigNoz/signoz/pkg/valuer"
-"go.uber.org/zap"
+"log/slog"
 )
 func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
@@ -35,23 +35,23 @@
 }
 if constants.FetchFeatures == "true" {
-zap.L().Debug("fetching license")
+slog.DebugContext(ctx, "fetching license")
 license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
 if err != nil {
-zap.L().Error("failed to fetch license", zap.Error(err))
+slog.ErrorContext(ctx, "failed to fetch license", "error", err)
 } else if license == nil {
-zap.L().Debug("no active license found")
+slog.DebugContext(ctx, "no active license found")
 } else {
 licenseKey := license.Key
-zap.L().Debug("fetching zeus features")
+slog.DebugContext(ctx, "fetching zeus features")
 zeusFeatures, err := fetchZeusFeatures(constants.ZeusFeaturesURL, licenseKey)
 if err == nil {
-zap.L().Debug("fetched zeus features", zap.Any("features", zeusFeatures))
+slog.DebugContext(ctx, "fetched zeus features", "features", zeusFeatures)
 // merge featureSet and zeusFeatures in featureSet with higher priority to zeusFeatures
 featureSet = MergeFeatureSets(zeusFeatures, featureSet)
 } else {
-zap.L().Error("failed to fetch zeus features", zap.Error(err))
+slog.ErrorContext(ctx, "failed to fetch zeus features", "error", err)
 }
 }
 }

View File

@@ -14,7 +14,7 @@ import (
 v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
 "github.com/SigNoz/signoz/pkg/types/authtypes"
 "github.com/SigNoz/signoz/pkg/valuer"
-"go.uber.org/zap"
+"log/slog"
 )
 func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
@@ -35,7 +35,7 @@
 queryRangeParams, apiErrorObj := baseapp.ParseQueryRangeParams(r)
 if apiErrorObj != nil {
-zap.L().Error("error parsing metric query range params", zap.Error(apiErrorObj.Err))
+slog.ErrorContext(r.Context(), "error parsing metric query range params", "error", apiErrorObj.Err)
 RespondError(w, apiErrorObj, nil)
 return
 }
@@ -44,7 +44,7 @@
 // add temporality for each metric
 temporalityErr := aH.PopulateTemporality(r.Context(), orgID, queryRangeParams)
 if temporalityErr != nil {
-zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr))
+slog.ErrorContext(r.Context(), "error while adding temporality for metrics", "error", temporalityErr)
 RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
 return
 }

View File

@@ -47,7 +47,7 @@ import (
 baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
 baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
 "github.com/SigNoz/signoz/pkg/query-service/utils"
-"go.uber.org/zap"
+"log/slog"
 )
 // Server runs HTTP, Mux and a grpc server
@@ -83,6 +83,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
 }
 reader := clickhouseReader.NewReader(
+signoz.Instrumentation.Logger(),
 signoz.SQLStore,
 signoz.TelemetryStore,
 signoz.Prometheus,
@@ -278,7 +279,7 @@ func (s *Server) initListeners() error {
 return err
 }
-zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.httpHostPort))
+slog.Info(fmt.Sprintf("Query server started listening on %s...", s.httpHostPort))
 return nil
 }
@@ -298,31 +299,31 @@ func (s *Server) Start(ctx context.Context) error {
 }
 go func() {
-zap.L().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.httpHostPort))
+slog.Info("Starting HTTP server", "port", httpPort, "addr", s.httpHostPort)
 switch err := s.httpServer.Serve(s.httpConn); err {
 case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
 // normal exit, nothing to do
 default:
-zap.L().Error("Could not start HTTP server", zap.Error(err))
+slog.Error("Could not start HTTP server", "error", err)
 }
 s.unavailableChannel <- healthcheck.Unavailable
 }()
 go func() {
-zap.L().Info("Starting pprof server", zap.String("addr", baseconst.DebugHttpPort))
+slog.Info("Starting pprof server", "addr", baseconst.DebugHttpPort)
 err = http.ListenAndServe(baseconst.DebugHttpPort, nil)
 if err != nil {
-zap.L().Error("Could not start pprof server", zap.Error(err))
+slog.Error("Could not start pprof server", "error", err)
 }
 }()
 go func() {
-zap.L().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint))
+slog.Info("Starting OpAmp Websocket server", "addr", baseconst.OpAmpWsEndpoint)
 err := s.opampServer.Start(baseconst.OpAmpWsEndpoint)
 if err != nil {
-zap.L().Error("opamp ws server failed to start", zap.Error(err))
+slog.Error("opamp ws server failed to start", "error", err)
 s.unavailableChannel <- healthcheck.Unavailable
 }
 }()
@@ -358,10 +359,9 @@ func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertma
 MetadataStore: metadataStore,
 Prometheus: prometheus,
 Context: context.Background(),
-Logger: zap.L(),
 Reader: ch,
 Querier: querier,
-SLogger: providerSettings.Logger,
+Logger: providerSettings.Logger,
 Cache: cache,
 EvalDelay: baseconst.GetEvalDelay(),
 PrepareTaskFunc: rules.PrepareTaskFunc,
@@ -380,7 +380,7 @@
 return nil, fmt.Errorf("rule manager error: %v", err)
 }
-zap.L().Info("rules manager is ready")
+slog.Info("rules manager is ready")
 return manager, nil
 }

View File

@@ -2,6 +2,7 @@ package rules
 import (
 "context"
+"log/slog"
 "testing"
 "time"
@@ -116,7 +117,7 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
 telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
 options := clickhouseReader.NewOptions("primaryNamespace")
-reader := clickhouseReader.NewReader(nil, telemetryStore, nil, "", time.Second, nil, nil, options)
+reader := clickhouseReader.NewReader(slog.Default(), nil, telemetryStore, nil, "", time.Second, nil, nil, options)
 rule, err := NewAnomalyRule(
 "test-anomaly-rule",
@@ -247,7 +248,7 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
 telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
 options := clickhouseReader.NewOptions("primaryNamespace")
-reader := clickhouseReader.NewReader(nil, telemetryStore, nil, "", time.Second, nil, nil, options)
+reader := clickhouseReader.NewReader(slog.Default(), nil, telemetryStore, nil, "", time.Second, nil, nil, options)
 rule, err := NewAnomalyRule("test-anomaly-rule", valuer.GenerateUUID(), &postableRule, reader, nil, logger, nil)
 require.NoError(t, err)

View File

@@ -13,7 +13,7 @@ import (
 "github.com/SigNoz/signoz/pkg/types/ruletypes"
 "github.com/SigNoz/signoz/pkg/valuer"
 "github.com/google/uuid"
-"go.uber.org/zap"
+"log/slog"
 )
 func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error) {
@@ -34,7 +34,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
 opts.Rule,
 opts.Reader,
 opts.Querier,
-opts.SLogger,
+opts.Logger,
 baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
 baserules.WithSQLStore(opts.SQLStore),
 baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
@@ -57,7 +57,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
 ruleId,
 opts.OrgID,
 opts.Rule,
-opts.SLogger,
+opts.Logger,
 opts.Reader,
 opts.ManagerOpts.Prometheus,
 baserules.WithSQLStore(opts.SQLStore),
@@ -82,7 +82,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
 opts.Rule,
 opts.Reader,
 opts.Querier,
-opts.SLogger,
+opts.Logger,
 opts.Cache,
 baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
 baserules.WithSQLStore(opts.SQLStore),
@@ -142,7 +142,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
 parsedRule,
 opts.Reader,
 opts.Querier,
-opts.SLogger,
+opts.Logger,
 baserules.WithSendAlways(),
 baserules.WithSendUnmatched(),
 baserules.WithSQLStore(opts.SQLStore),
@@ -151,7 +151,7 @@
 )
 if err != nil {
-zap.L().Error("failed to prepare a new threshold rule for test", zap.String("name", alertname), zap.Error(err))
+slog.Error("failed to prepare a new threshold rule for test", "name", alertname, "error", err)
 return 0, basemodel.BadRequest(err)
 }
@@ -162,7 +162,7 @@
 alertname,
 opts.OrgID,
 parsedRule,
-opts.SLogger,
+opts.Logger,
 opts.Reader,
 opts.ManagerOpts.Prometheus,
 baserules.WithSendAlways(),
@@ -173,7 +173,7 @@
 )
 if err != nil {
-zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", alertname), zap.Error(err))
+slog.Error("failed to prepare a new promql rule for test", "name", alertname, "error", err)
 return 0, basemodel.BadRequest(err)
 } else if parsedRule.RuleType == ruletypes.RuleTypeAnomaly {
@@ -184,7 +184,7 @@
 parsedRule,
 opts.Reader,
 opts.Querier,
-opts.SLogger,
+opts.Logger,
 opts.Cache,
 baserules.WithSendAlways(),
 baserules.WithSendUnmatched(),
@@ -193,7 +193,7 @@
 baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
 )
 if err != nil {
-zap.L().Error("failed to prepare a new anomaly rule for test", zap.String("name", alertname), zap.Error(err))
+slog.Error("failed to prepare a new anomaly rule for test", "name", alertname, "error", err)
 return 0, basemodel.BadRequest(err)
 }
 } else {
@@ -205,7 +205,7 @@
 alertsFound, err := rule.Eval(ctx, ts)
 if err != nil {
-zap.L().Error("evaluating rule failed", zap.String("rule", rule.Name()), zap.Error(err))
+slog.Error("evaluating rule failed", "rule", rule.Name(), "error", err)
 return 0, basemodel.InternalError(fmt.Errorf("rule evaluation failed"))
 }
 rule.SendAlerts(ctx, ts, 0, time.Minute, opts.NotifyFunc)

View File

@@ -8,12 +8,12 @@ import (
 "sync/atomic"
 "time"
+"log/slog"
 "github.com/ClickHouse/clickhouse-go/v2"
 "github.com/go-co-op/gocron"
 "github.com/google/uuid"
-"go.uber.org/zap"
 "github.com/SigNoz/signoz/ee/query-service/model"
 "github.com/SigNoz/signoz/pkg/licensing"
 "github.com/SigNoz/signoz/pkg/modules/organization"
@@ -76,19 +76,19 @@ func (lm *Manager) Start(ctx context.Context) error {
 func (lm *Manager) UploadUsage(ctx context.Context) {
 organizations, err := lm.orgGetter.ListByOwnedKeyRange(ctx)
 if err != nil {
-zap.L().Error("failed to get organizations", zap.Error(err))
+slog.ErrorContext(ctx, "failed to get organizations", "error", err)
 return
 }
 for _, organization := range organizations {
 // check if license is present or not
 license, err := lm.licenseService.GetActive(ctx, organization.ID)
 if err != nil {
-zap.L().Error("failed to get active license", zap.Error(err))
+slog.ErrorContext(ctx, "failed to get active license", "error", err)
 return
 }
 if license == nil {
 // we will not start the usage reporting if license is not present.
-zap.L().Info("no license present, skipping usage reporting")
+slog.InfoContext(ctx, "no license present, skipping usage reporting")
 return
 }
@@ -115,7 +115,7 @@
 dbusages := []model.UsageDB{}
 err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
 if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
-zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err))
+slog.ErrorContext(ctx, "failed to get usage from clickhouse", "error", err)
 return
 }
 for _, u := range dbusages {
@@ -125,24 +125,24 @@
 }
 if len(usages) <= 0 {
-zap.L().Info("no snapshots to upload, skipping.")
+slog.InfoContext(ctx, "no snapshots to upload, skipping")
 return
 }
-zap.L().Info("uploading usage data")
+slog.InfoContext(ctx, "uploading usage data")
 usagesPayload := []model.Usage{}
 for _, usage := range usages {
 usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
 if err != nil {
-zap.L().Error("error while decrypting usage data: %v", zap.Error(err))
+slog.ErrorContext(ctx, "error while decrypting usage data", "error", err)
 return
 }
 usageData := model.Usage{}
 err = json.Unmarshal(usageDataBytes, &usageData)
 if err != nil {
-zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err))
+slog.ErrorContext(ctx, "error while unmarshalling usage data", "error", err)
 return
 }
@@ -163,13 +163,13 @@
 body, errv2 := json.Marshal(payload)
 if errv2 != nil {
-zap.L().Error("error while marshalling usage payload: %v", zap.Error(errv2))
+slog.ErrorContext(ctx, "error while marshalling usage payload", "error", errv2)
 return
 }
 errv2 = lm.zeus.PutMeters(ctx, payload.LicenseKey.String(), body)
 if errv2 != nil {
-zap.L().Error("failed to upload usage: %v", zap.Error(errv2))
+slog.ErrorContext(ctx, "failed to upload usage", "error", errv2)
 // not returning error here since it is captured in the failed count
 return
 }
@@ -179,7 +179,7 @@
 func (lm *Manager) Stop(ctx context.Context) {
 lm.scheduler.Stop()
-zap.L().Info("sending usage data before shutting down")
+slog.InfoContext(ctx, "sending usage data before shutting down")
 // send usage before shutting down
 lm.UploadUsage(ctx)
 atomic.StoreUint32(&locker, stateUnlocked)

View File

@@ -0,0 +1,29 @@
import { PropsWithChildren } from 'react';
type CommonProps = PropsWithChildren<{
className?: string;
minSize?: number;
maxSize?: number;
defaultSize?: number;
direction?: 'horizontal' | 'vertical';
autoSaveId?: string;
withHandle?: boolean;
}>;
export function ResizablePanelGroup({
children,
className,
}: CommonProps): JSX.Element {
return <div className={className}>{children}</div>;
}
export function ResizablePanel({
children,
className,
}: CommonProps): JSX.Element {
return <div className={className}>{children}</div>;
}
export function ResizableHandle({ className }: CommonProps): JSX.Element {
return <div className={className} />;
}

View File

@@ -14,6 +14,7 @@ const config: Config.InitialOptions = {
 '\\.(css|less|scss)$': '<rootDir>/__mocks__/cssMock.ts',
 '\\.md$': '<rootDir>/__mocks__/cssMock.ts',
 '^uplot$': '<rootDir>/__mocks__/uplotMock.ts',
+'^@signozhq/resizable$': '<rootDir>/__mocks__/resizableMock.tsx',
 '^hooks/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
 '^src/hooks/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
 '^.*/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,

View File

@@ -64,7 +64,7 @@
 "@signozhq/sonner": "0.1.0",
 "@signozhq/switch": "0.0.2",
 "@signozhq/table": "0.3.7",
-"@signozhq/toggle-group": "^0.0.1",
+"@signozhq/toggle-group": "0.0.1",
 "@signozhq/tooltip": "0.0.2",
 "@tanstack/react-table": "8.20.6",
 "@tanstack/react-virtual": "3.11.2",

File diff suppressed because it is too large.

[Binary image file changed; new version is 214 KiB]

View File

@@ -1,5 +1,6 @@
 // ** Helpers
 import { MetrictypesTypeDTO } from 'api/generated/services/sigNoz.schemas';
+import { defaultTraceSelectedColumns } from 'container/OptionsMenu/constants';
 import { createIdFromObjectFields } from 'lib/createIdFromObjectFields';
 import { createNewBuilderItemName } from 'lib/newQueryBuilder/createNewBuilderItemName';
 import { IAttributeValuesResponse } from 'types/api/queryBuilder/getAttributesValues';
@@ -548,3 +549,49 @@
 [DataTypes.ArrayBool]: 'boolAttributeValues',
 [DataTypes.EMPTY]: 'stringAttributeValues',
 };
export const listViewInitialLogQuery: Query = {
...initialQueriesMap.logs,
builder: {
...initialQueriesMap.logs.builder,
queryData: [
{
...initialQueriesMap.logs.builder.queryData[0],
aggregateOperator: LogsAggregatorOperator.NOOP,
orderBy: [{ columnName: 'timestamp', order: 'desc' }],
offset: 0,
pageSize: 100,
},
],
},
};
export const PANEL_TYPES_INITIAL_QUERY: Record<PANEL_TYPES, Query> = {
[PANEL_TYPES.TIME_SERIES]: initialQueriesMap.metrics,
[PANEL_TYPES.VALUE]: initialQueriesMap.metrics,
[PANEL_TYPES.TABLE]: initialQueriesMap.metrics,
[PANEL_TYPES.LIST]: listViewInitialLogQuery,
[PANEL_TYPES.TRACE]: initialQueriesMap.traces,
[PANEL_TYPES.BAR]: initialQueriesMap.metrics,
[PANEL_TYPES.PIE]: initialQueriesMap.metrics,
[PANEL_TYPES.HISTOGRAM]: initialQueriesMap.metrics,
[PANEL_TYPES.EMPTY_WIDGET]: initialQueriesMap.metrics,
};
export const listViewInitialTraceQuery: Query = {
// it should be the above commented query
...initialQueriesMap.traces,
builder: {
...initialQueriesMap.traces.builder,
queryData: [
{
...initialQueriesMap.traces.builder.queryData[0],
aggregateOperator: LogsAggregatorOperator.NOOP,
orderBy: [{ columnName: 'timestamp', order: 'desc' }],
offset: 0,
pageSize: 10,
selectColumns: defaultTraceSelectedColumns,
},
],
},
};

View File

@@ -1,50 +0,0 @@
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { defaultTraceSelectedColumns } from 'container/OptionsMenu/constants';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { LogsAggregatorOperator } from 'types/common/queryBuilder';
export const PANEL_TYPES_INITIAL_QUERY = {
[PANEL_TYPES.TIME_SERIES]: initialQueriesMap.metrics,
[PANEL_TYPES.VALUE]: initialQueriesMap.metrics,
[PANEL_TYPES.TABLE]: initialQueriesMap.metrics,
[PANEL_TYPES.LIST]: initialQueriesMap.logs,
[PANEL_TYPES.TRACE]: initialQueriesMap.traces,
[PANEL_TYPES.BAR]: initialQueriesMap.metrics,
[PANEL_TYPES.PIE]: initialQueriesMap.metrics,
[PANEL_TYPES.HISTOGRAM]: initialQueriesMap.metrics,
[PANEL_TYPES.EMPTY_WIDGET]: initialQueriesMap.metrics,
};
export const listViewInitialLogQuery: Query = {
...initialQueriesMap.logs,
builder: {
...initialQueriesMap.logs.builder,
queryData: [
{
...initialQueriesMap.logs.builder.queryData[0],
aggregateOperator: LogsAggregatorOperator.NOOP,
orderBy: [{ columnName: 'timestamp', order: 'desc' }],
offset: 0,
pageSize: 100,
},
],
},
};
export const listViewInitialTraceQuery: Query = {
// it should be the above commented query
...initialQueriesMap.traces,
builder: {
...initialQueriesMap.traces.builder,
queryData: [
{
...initialQueriesMap.traces.builder.queryData[0],
aggregateOperator: LogsAggregatorOperator.NOOP,
orderBy: [{ columnName: 'timestamp', order: 'desc' }],
offset: 0,
pageSize: 10,
selectColumns: defaultTraceSelectedColumns,
},
],
},
};

View File

@@ -1,94 +0,0 @@
import { Card, Modal } from 'antd';
import logEvent from 'api/common/logEvent';
import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
import createQueryParams from 'lib/createQueryParams';
import history from 'lib/history';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { LogsAggregatorOperator } from 'types/common/queryBuilder';
import { v4 as uuid } from 'uuid';
import { PANEL_TYPES_INITIAL_QUERY } from './constants';
import menuItems from './menuItems';
import { Text } from './styles';
import './ComponentSlider.styles.scss';
function DashboardGraphSlider(): JSX.Element {
const { handleToggleDashboardSlider, isDashboardSliderOpen } = useDashboard();
const onClickHandler = (name: PANEL_TYPES) => (): void => {
const id = uuid();
handleToggleDashboardSlider(false);
logEvent('Dashboard Detail: New panel type selected', {
// dashboardId: '',
// dashboardName: '',
// numberOfPanels: 0, // todo - at this point we don't know these attributes
panelType: name,
widgetId: id,
});
const queryParamsLog = {
graphType: name,
widgetId: id,
[QueryParams.compositeQuery]: JSON.stringify({
...PANEL_TYPES_INITIAL_QUERY[name],
builder: {
...PANEL_TYPES_INITIAL_QUERY[name].builder,
queryData: [
{
...PANEL_TYPES_INITIAL_QUERY[name].builder.queryData[0],
aggregateOperator: LogsAggregatorOperator.NOOP,
orderBy: [{ columnName: 'timestamp', order: 'desc' }],
offset: 0,
pageSize: 100,
},
],
},
}),
};
const queryParams = {
graphType: name,
widgetId: id,
[QueryParams.compositeQuery]: JSON.stringify(
PANEL_TYPES_INITIAL_QUERY[name],
),
};
if (name === PANEL_TYPES.LIST) {
history.push(
`${history.location.pathname}/new?${createQueryParams(queryParamsLog)}`,
);
} else {
history.push(
`${history.location.pathname}/new?${createQueryParams(queryParams)}`,
);
}
};
const handleCardClick = (panelType: PANEL_TYPES): void => {
onClickHandler(panelType)();
};
return (
<Modal
open={isDashboardSliderOpen}
onCancel={(): void => {
handleToggleDashboardSlider(false);
}}
rootClassName="graph-selection"
footer={null}
title="New Panel"
>
<div className="panel-selection">
{menuItems.map(({ name, icon, display }) => (
<Card onClick={(): void => handleCardClick(name)} id={name} key={name}>
{icon}
<Text>{display}</Text>
</Card>
))}
</div>
</Modal>
);
}
export default DashboardGraphSlider;

View File

@@ -1,41 +0,0 @@
import { Card as CardComponent, Typography } from 'antd';
import styled from 'styled-components';
export const Container = styled.div`
display: flex;
justify-content: right;
gap: 8px;
margin-bottom: 12px;
`;
export const Card = styled(CardComponent)`
min-height: 80px;
min-width: 120px;
overflow-y: auto;
cursor: pointer;
transition: transform 0.2s;
.ant-card-body {
padding: 12px;
height: 100%;
display: flex;
flex-direction: column;
justify-content: space-between;
align-items: center;
.ant-typography {
font-size: 12px;
font-weight: 600;
}
}
&:hover {
transform: scale(1.05);
border: 1px solid var(--bg-robin-400);
}
`;
export const Text = styled(Typography)`
text-align: center;
margin-top: 1rem;
`;

View File

@@ -182,9 +182,7 @@ describe('Dashboard landing page actions header tests', () => {
	(useLocation as jest.Mock).mockReturnValue(mockLocation);
	const mockContextValue: IDashboardContext = {
-		isDashboardSliderOpen: false,
		isDashboardLocked: false,
-		handleToggleDashboardSlider: jest.fn(),
		handleDashboardLockToggle: jest.fn(),
		dashboardResponse: {} as IDashboardContext['dashboardResponse'],
		selectedDashboard: (getDashboardById.data as unknown) as Dashboard,

View File

@@ -40,6 +40,7 @@ import {
} from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { usePanelTypeSelectionModalStore } from 'providers/Dashboard/helpers/panelTypeSelectionModalHelper';
import { sortLayout } from 'providers/Dashboard/util';
import { DashboardData } from 'types/api/dashboard/getAll';
import { Props } from 'types/api/dashboard/update';
@@ -48,10 +49,10 @@ import { ComponentTypes } from 'utils/permission';
import { v4 as uuid } from 'uuid';
import DashboardHeader from '../components/DashboardHeader/DashboardHeader';
-import DashboardGraphSlider from '../ComponentsSlider';
import DashboardSettings from '../DashboardSettings';
import { Base64Icons } from '../DashboardSettings/General/utils';
import DashboardVariableSelection from '../DashboardVariablesSelection';
import PanelTypeSelectionModal from '../PanelTypeSelectionModal';
import SettingsDrawer from './SettingsDrawer';
import { VariablesSettingsTab } from './types';
import {
@@ -69,6 +70,9 @@ interface DashboardDescriptionProps {
// eslint-disable-next-line sonarjs/cognitive-complexity
function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
	const { handle } = props;
	const setIsPanelTypeSelectionModalOpen = usePanelTypeSelectionModalStore(
		(s) => s.setIsPanelTypeSelectionModalOpen,
	);
	const {
		selectedDashboard,
		panelMap,
@@ -77,7 +81,6 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
		setLayouts,
		isDashboardLocked,
		setSelectedDashboard,
-		handleToggleDashboardSlider,
		handleDashboardLockToggle,
	} = useDashboard();
@@ -145,14 +148,14 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
	const [addPanelPermission] = useComponentPermission(permissions, userRole);
	const onEmptyWidgetHandler = useCallback(() => {
-		handleToggleDashboardSlider(true);
		setIsPanelTypeSelectionModalOpen(true);
		logEvent('Dashboard Detail: Add new panel clicked', {
			dashboardId: selectedDashboard?.id,
			dashboardName: selectedDashboard?.data.title,
			numberOfPanels: selectedDashboard?.data.widgets?.length,
		});
		// eslint-disable-next-line react-hooks/exhaustive-deps
-	}, [handleToggleDashboardSlider]);
	}, [setIsPanelTypeSelectionModalOpen]);
	const handleLockDashboardToggle = (): void => {
		setIsDashbordSettingsOpen(false);
@@ -521,7 +524,7 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
				<DashboardVariableSelection />
			</section>
		)}
-		<DashboardGraphSlider />
		<PanelTypeSelectionModal />
		<Modal
			open={isRenameDashboardOpen}

View File

@@ -1,4 +1,4 @@
-.graph-selection {
.panel-type-selection-modal {
	.ant-modal-content {
		width: 515px;
		max-height: 646px;
@@ -76,6 +76,11 @@
				content: none;
			}
		}
		.panel-type-text {
			text-align: center;
			margin-top: 1rem;
		}
	}
}
@@ -114,7 +119,7 @@
}
.lightMode {
-	.graph-selection {
	.panel-type-selection-modal {
		.ant-modal-content {
			border: 1px solid var(--bg-vanilla-300);
			background: var(--bg-vanilla-100);

View File

@@ -0,0 +1,68 @@
import { memo } from 'react';
import { Card, Modal, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { QueryParams } from 'constants/query';
import { PANEL_TYPES, PANEL_TYPES_INITIAL_QUERY } from 'constants/queryBuilder';
import createQueryParams from 'lib/createQueryParams';
import history from 'lib/history';
import { usePanelTypeSelectionModalStore } from 'providers/Dashboard/helpers/panelTypeSelectionModalHelper';
import { v4 as uuid } from 'uuid';
import { PanelTypesWithData } from './menuItems';
import './PanelTypeSelectionModal.styles.scss';
function PanelTypeSelectionModal(): JSX.Element {
const {
isPanelTypeSelectionModalOpen,
setIsPanelTypeSelectionModalOpen,
} = usePanelTypeSelectionModalStore();
const onClickHandler = (name: PANEL_TYPES) => (): void => {
const id = uuid();
setIsPanelTypeSelectionModalOpen(false);
logEvent('Dashboard Detail: New panel type selected', {
panelType: name,
widgetId: id,
});
const queryParams = {
graphType: name,
widgetId: id,
[QueryParams.compositeQuery]: JSON.stringify(
PANEL_TYPES_INITIAL_QUERY[name],
),
};
history.push(
`${history.location.pathname}/new?${createQueryParams(queryParams)}`,
);
};
const handleCardClick = (panelType: PANEL_TYPES): void => {
onClickHandler(panelType)();
};
return (
<Modal
open={isPanelTypeSelectionModalOpen}
onCancel={(): void => {
setIsPanelTypeSelectionModalOpen(false);
}}
rootClassName="panel-type-selection-modal"
footer={null}
title="New Panel"
>
<div className="panel-selection">
{PanelTypesWithData.map(({ name, icon, display }) => (
<Card onClick={(): void => handleCardClick(name)} id={name} key={name}>
{icon}
<Typography className="panel-type-text">{display}</Typography>
</Card>
))}
</div>
</Modal>
);
}
export default memo(PanelTypeSelectionModal);
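The panelTypeSelectionModalHelper store referenced above is not included in this compare view. As a rough sketch only (assuming it follows the same zustand-style helper pattern as the existing selectedRowWidgetIdHelper), it just needs to expose the open flag and its setter:
// Hypothetical sketch of providers/Dashboard/helpers/panelTypeSelectionModalHelper.ts; not the actual file from this change.
import { create } from 'zustand';

interface PanelTypeSelectionModalState {
	isPanelTypeSelectionModalOpen: boolean;
	setIsPanelTypeSelectionModalOpen: (isOpen: boolean) => void;
}

export const usePanelTypeSelectionModalStore = create<PanelTypeSelectionModalState>(
	(set) => ({
		isPanelTypeSelectionModalOpen: false,
		setIsPanelTypeSelectionModalOpen: (isOpen): void =>
			set({ isPanelTypeSelectionModalOpen: isOpen }),
	}),
);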

View File

@@ -9,7 +9,7 @@ import {
	Table,
} from 'lucide-react';
-const Items: ItemsProps[] = [
export const PanelTypesWithData: ItemsProps[] = [
	{
		name: PANEL_TYPES.TIME_SERIES,
		icon: <LineChart size={16} color={Color.BG_ROBIN_400} />,
@@ -52,5 +52,3 @@ export interface ItemsProps {
	icon: JSX.Element;
	display: string;
}
-export default Items;

View File

@@ -224,7 +224,7 @@ describe('TimeSeriesPanel utils', () => {
		});
	});
-	it('uses DrawStyle.Line and VisibilityMode.Never when series has multiple valid points', () => {
	it('uses DrawStyle.Line and showPoints false when series has multiple valid points', () => {
		const apiResponse = createApiResponse([
			{
				metric: {},

View File

@@ -10,9 +10,9 @@ import getLabelName from 'lib/getLabelName';
import { OnClickPluginOpts } from 'lib/uPlotLib/plugins/onClickPlugin';
import {
	DrawStyle,
	FillMode,
	LineInterpolation,
	LineStyle,
-	VisibilityMode,
} from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { isInvalidPlotValue } from 'lib/uPlotV2/utils/dataUtils';
@@ -124,12 +124,12 @@ export const prepareUPlotConfig = ({
			label: label,
			colorMapping: widget.customLegendColors ?? {},
			spanGaps: true,
-			lineStyle: LineStyle.Solid,
-			lineInterpolation: LineInterpolation.Spline,
-			showPoints: hasSingleValidPoint
-				? VisibilityMode.Always
-				: VisibilityMode.Never,
			lineStyle: widget.lineStyle || LineStyle.Solid,
			lineInterpolation: widget.lineInterpolation || LineInterpolation.Spline,
			showPoints:
				widget.showPoints || hasSingleValidPoint ? true : !!widget.showPoints,
			pointSize: 5,
			fillMode: widget.fillMode || FillMode.None,
			isDarkMode,
		});
	});
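Worth noting: the new showPoints expression above reduces to a plain boolean OR. A small illustrative helper (the function name is ours, not from the diff) spells out the same resolution:
// widget.showPoints || hasSingleValidPoint ? true : !!widget.showPoints
// - if either operand is truthy, the ternary yields true
// - otherwise both are falsy and !!widget.showPoints is false
// so the whole expression is equivalent to a boolean OR:
function resolveShowPoints(
	widgetShowPoints: boolean | undefined,
	hasSingleValidPoint: boolean,
): boolean {
	// a series with a single valid point still gets a marker, since a lone point would otherwise be invisible
	return Boolean(widgetShowPoints) || hasSingleValidPoint;
}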

View File

@@ -9,17 +9,18 @@ import DashboardSettings from 'container/DashboardContainer/DashboardSettings';
import useComponentPermission from 'hooks/useComponentPermission';
import { useAppContext } from 'providers/App/App';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { usePanelTypeSelectionModalStore } from 'providers/Dashboard/helpers/panelTypeSelectionModalHelper';
import { ROLES, USER_ROLES } from 'types/roles';
import { ComponentTypes } from 'utils/permission';
import './DashboardEmptyState.styles.scss';
export default function DashboardEmptyState(): JSX.Element {
-	const {
-		selectedDashboard,
-		isDashboardLocked,
-		handleToggleDashboardSlider,
-	} = useDashboard();
	const setIsPanelTypeSelectionModalOpen = usePanelTypeSelectionModalStore(
		(s) => s.setIsPanelTypeSelectionModalOpen,
	);
	const { selectedDashboard, isDashboardLocked } = useDashboard();
	const variablesSettingsTabHandle = useRef<VariablesSettingsTab>(null);
	const [isSettingsDrawerOpen, setIsSettingsDrawerOpen] = useState<boolean>(
@@ -41,14 +42,14 @@ export default function DashboardEmptyState(): JSX.Element {
	const [addPanelPermission] = useComponentPermission(permissions, userRole);
	const onEmptyWidgetHandler = useCallback(() => {
-		handleToggleDashboardSlider(true);
		setIsPanelTypeSelectionModalOpen(true);
		logEvent('Dashboard Detail: Add new panel clicked', {
			dashboardId: selectedDashboard?.id,
			dashboardName: selectedDashboard?.data.title,
			numberOfPanels: selectedDashboard?.data.widgets?.length,
		});
		// eslint-disable-next-line react-hooks/exhaustive-deps
-	}, [handleToggleDashboardSlider]);
	}, [setIsPanelTypeSelectionModalOpen]);
	const onConfigureClick = useCallback((): void => {
		setIsSettingsDrawerOpen(true);

View File

@@ -2,7 +2,7 @@ import { useCallback } from 'react';
import { Select, Typography } from 'antd';
import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
-import GraphTypes from 'container/DashboardContainer/ComponentsSlider/menuItems';
import { PanelTypesWithData } from 'container/DashboardContainer/PanelTypeSelectionModal/menuItems';
import { handleQueryChange } from 'container/NewWidget/utils';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
@@ -59,7 +59,7 @@ function PanelTypeSelector({
			data-testid="panel-change-select"
			disabled={disabled}
		>
-			{GraphTypes.map((item) => (
			{PanelTypesWithData.map((item) => (
				<Option key={item.name} value={item.name}>
					<div className="view-panel-select-option">
						<div className="icon">{item.icon}</div>

View File

@@ -5,6 +5,7 @@ import useComponentPermission from 'hooks/useComponentPermission';
import { EllipsisIcon, PenLine, Plus, X } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { usePanelTypeSelectionModalStore } from 'providers/Dashboard/helpers/panelTypeSelectionModalHelper';
import { setSelectedRowWidgetId } from 'providers/Dashboard/helpers/selectedRowWidgetIdHelper';
import { ROLES, USER_ROLES } from 'types/roles';
import { ComponentTypes } from 'utils/permission';
@@ -34,11 +35,11 @@ export function WidgetRowHeader(props: WidgetRowHeaderProps): JSX.Element {
	} = props;
	const [isRowSettingsOpen, setIsRowSettingsOpen] = useState<boolean>(false);
-	const {
-		handleToggleDashboardSlider,
-		selectedDashboard,
-		isDashboardLocked,
-	} = useDashboard();
	const setIsPanelTypeSelectionModalOpen = usePanelTypeSelectionModalStore(
		(s) => s.setIsPanelTypeSelectionModalOpen,
	);
	const { selectedDashboard, isDashboardLocked } = useDashboard();
	const permissions: ComponentTypes[] = ['add_panel'];
	const { user } = useAppContext();
@@ -87,7 +88,7 @@ export function WidgetRowHeader(props: WidgetRowHeaderProps): JSX.Element {
					}
					setSelectedRowWidgetId(selectedDashboard.id, id);
-					handleToggleDashboardSlider(true);
					setIsPanelTypeSelectionModalOpen(true);
				}}
			>
				New Panel

View File

@@ -15,6 +15,7 @@ import ROUTES from 'constants/routes';
import { getMetricsListQuery } from 'container/MetricsExplorer/Summary/utils';
import { useGetMetricsList } from 'hooks/metricsExplorer/useGetMetricsList';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useIsDarkMode } from 'hooks/useDarkMode';
import history from 'lib/history';
import cloneDeep from 'lodash-es/cloneDeep';
import { AnimatePresence } from 'motion/react';
@@ -43,6 +44,7 @@ const homeInterval = 30 * 60 * 1000;
// eslint-disable-next-line sonarjs/cognitive-complexity
export default function Home(): JSX.Element {
	const { user } = useAppContext();
	const isDarkMode = useIsDarkMode();
	const [startTime, setStartTime] = useState<number | null>(null);
	const [endTime, setEndTime] = useState<number | null>(null);
@@ -680,7 +682,11 @@ export default function Home(): JSX.Element {
			<div className="checklist-img-container">
				<img
-					src="/Images/allInOne.svg"
					src={
						isDarkMode
							? '/Images/allInOne.svg'
							: '/Images/allInOneLightMode.svg'
					}
					alt="checklist-img"
					className="checklist-img"
				/>

View File

@@ -65,6 +65,35 @@
	}
}
.new-widget-container {
.resizable-panel-left-container {
overflow-x: hidden;
overflow-y: auto;
}
.resizable-panel-right-container {
overflow-y: auto !important;
min-width: 350px;
&::-webkit-scrollbar {
width: 0.3rem;
}
&::-webkit-scrollbar-thumb {
background: rgb(136, 136, 136);
border-radius: 0.625rem;
}
&::-webkit-scrollbar-track {
background: transparent;
}
}
.widget-resizable-panel-group {
.widget-resizable-handle {
height: 100vh;
}
}
}
.lightMode {
	.edit-header {
		border-bottom: 1px solid var(--bg-vanilla-300);
@@ -81,4 +110,11 @@
		}
	}
}
.widget-resizable-panel-group {
.bg-border {
background: var(--bg-vanilla-300);
border-color: var(--bg-vanilla-300);
}
}
}

View File

@@ -0,0 +1,21 @@
.fill-mode-selector {
.fill-mode-icon {
width: 24px;
height: 24px;
}
.fill-mode-label {
text-transform: uppercase;
font-size: 12px;
font-weight: 500;
color: var(--bg-vanilla-400);
}
}
.lightMode {
.fill-mode-selector {
.fill-mode-label {
color: var(--bg-ink-400);
}
}
}

View File

@@ -0,0 +1,94 @@
import { ToggleGroup, ToggleGroupItem } from '@signozhq/toggle-group';
import { Typography } from 'antd';
import { FillMode } from 'lib/uPlotV2/config/types';
import './FillModeSelector.styles.scss';
interface FillModeSelectorProps {
value: FillMode;
onChange: (value: FillMode) => void;
}
export function FillModeSelector({
value,
onChange,
}: FillModeSelectorProps): JSX.Element {
return (
<section className="fill-mode-selector control-container">
<Typography.Text className="section-heading">Fill mode</Typography.Text>
<ToggleGroup
type="single"
value={value}
variant="outline"
size="lg"
onValueChange={(newValue): void => {
if (newValue) {
onChange(newValue as FillMode);
}
}}
>
<ToggleGroupItem value={FillMode.None} aria-label="None" title="None">
<svg
className="fill-mode-icon"
viewBox="0 0 48 48"
fill="none"
stroke="#888"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
>
<rect x="8" y="16" width="32" height="16" stroke="#888" fill="none" />
</svg>
<Typography.Text className="section-heading-small">None</Typography.Text>
</ToggleGroupItem>
<ToggleGroupItem value={FillMode.Solid} aria-label="Solid" title="Solid">
<svg
className="fill-mode-icon"
viewBox="0 0 48 48"
fill="none"
stroke="#888"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
>
<rect x="8" y="16" width="32" height="16" fill="#888" />
</svg>
<Typography.Text className="section-heading-small">Solid</Typography.Text>
</ToggleGroupItem>
<ToggleGroupItem
value={FillMode.Gradient}
aria-label="Gradient"
title="Gradient"
>
<svg
className="fill-mode-icon"
viewBox="0 0 48 48"
fill="none"
stroke="#888"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
>
<defs>
<linearGradient id="fill-gradient" x1="0" y1="0" x2="1" y2="0">
<stop offset="0%" stopColor="#888" stopOpacity="0.2" />
<stop offset="100%" stopColor="#888" stopOpacity="0.8" />
</linearGradient>
</defs>
<rect
x="8"
y="16"
width="32"
height="16"
fill="url(#fill-gradient)"
stroke="#888"
/>
</svg>
<Typography.Text className="section-heading-small">
Gradient
</Typography.Text>
</ToggleGroupItem>
</ToggleGroup>
</section>
);
}

View File

@@ -0,0 +1,21 @@
.line-interpolation-selector {
.line-interpolation-icon {
width: 24px;
height: 24px;
}
.line-interpolation-label {
text-transform: uppercase;
font-size: 12px;
font-weight: 500;
color: var(--bg-vanilla-400);
}
}
.lightMode {
.line-interpolation-selector {
.line-interpolation-label {
color: var(--bg-ink-400);
}
}
}

View File

@@ -0,0 +1,110 @@
import { ToggleGroup, ToggleGroupItem } from '@signozhq/toggle-group';
import { Typography } from 'antd';
import { LineInterpolation } from 'lib/uPlotV2/config/types';
import './LineInterpolationSelector.styles.scss';
interface LineInterpolationSelectorProps {
value: LineInterpolation;
onChange: (value: LineInterpolation) => void;
}
export function LineInterpolationSelector({
value,
onChange,
}: LineInterpolationSelectorProps): JSX.Element {
return (
<section className="line-interpolation-selector control-container">
<Typography.Text className="section-heading">
Line interpolation
</Typography.Text>
<ToggleGroup
type="single"
value={value}
variant="outline"
size="lg"
onValueChange={(newValue): void => {
if (newValue) {
onChange(newValue as LineInterpolation);
}
}}
>
<ToggleGroupItem
value={LineInterpolation.Linear}
aria-label="Linear"
title="Linear"
>
<svg
className="line-interpolation-icon"
viewBox="0 0 48 48"
fill="none"
stroke="#888"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
>
<circle cx="8" cy="32" r="3" fill="#888" />
<circle cx="24" cy="16" r="3" fill="#888" />
<circle cx="40" cy="32" r="3" fill="#888" />
<path d="M8 32 L24 16 L40 32" stroke="#888" />
</svg>
</ToggleGroupItem>
<ToggleGroupItem value={LineInterpolation.Spline} aria-label="Spline">
<svg
className="line-interpolation-icon"
viewBox="0 0 48 48"
fill="none"
stroke="#888"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
>
<circle cx="8" cy="32" r="3" fill="#888" />
<circle cx="24" cy="16" r="3" fill="#888" />
<circle cx="40" cy="32" r="3" fill="#888" />
<path d="M8 32 C16 8, 32 8, 40 32" />
</svg>
</ToggleGroupItem>
<ToggleGroupItem
value={LineInterpolation.StepAfter}
aria-label="Step After"
>
<svg
className="line-interpolation-icon"
viewBox="0 0 48 48"
fill="none"
stroke="#888"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
>
<circle cx="8" cy="32" r="3" fill="#888" />
<circle cx="24" cy="16" r="3" fill="#888" />
<circle cx="40" cy="32" r="3" fill="#888" />
<path d="M8 32 V16 H24 V32 H40" />
</svg>
</ToggleGroupItem>
<ToggleGroupItem
value={LineInterpolation.StepBefore}
aria-label="Step Before"
>
<svg
className="line-interpolation-icon"
viewBox="0 0 48 48"
fill="none"
stroke="#888"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
>
<circle cx="8" cy="32" r="3" fill="#888" />
<circle cx="24" cy="16" r="3" fill="#888" />
<circle cx="40" cy="32" r="3" fill="#888" />
<path d="M8 32 H24 V16 H40 V32" />
</svg>
</ToggleGroupItem>
</ToggleGroup>
</section>
);
}

View File

@@ -0,0 +1,21 @@
.line-style-selector {
.line-style-icon {
width: 24px;
height: 24px;
}
.line-style-label {
text-transform: uppercase;
font-size: 12px;
font-weight: 500;
color: var(--bg-vanilla-400);
}
}
.lightMode {
.line-style-selector {
.line-style-label {
color: var(--bg-ink-400);
}
}
}

View File

@@ -0,0 +1,66 @@
import { ToggleGroup, ToggleGroupItem } from '@signozhq/toggle-group';
import { Typography } from 'antd';
import { LineStyle } from 'lib/uPlotV2/config/types';
import './LineStyleSelector.styles.scss';
interface LineStyleSelectorProps {
value: LineStyle;
onChange: (value: LineStyle) => void;
}
export function LineStyleSelector({
value,
onChange,
}: LineStyleSelectorProps): JSX.Element {
return (
<section className="line-style-selector control-container">
<Typography.Text className="section-heading">Line style</Typography.Text>
<ToggleGroup
type="single"
value={value}
variant="outline"
size="lg"
onValueChange={(newValue): void => {
if (newValue) {
onChange(newValue as LineStyle);
}
}}
>
<ToggleGroupItem value={LineStyle.Solid} aria-label="Solid" title="Solid">
<svg
className="line-style-icon"
viewBox="0 0 48 48"
fill="none"
stroke="#888"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
>
<path d="M8 24 L40 24" />
</svg>
<Typography.Text className="section-heading-small">Solid</Typography.Text>
</ToggleGroupItem>
<ToggleGroupItem
value={LineStyle.Dashed}
aria-label="Dashed"
title="Dashed"
>
<svg
className="line-style-icon"
viewBox="0 0 48 48"
fill="none"
stroke="#888"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
strokeDasharray="6 4"
>
<path d="M8 24 L40 24" />
</svg>
<Typography.Text className="section-heading-small">Dashed</Typography.Text>
</ToggleGroupItem>
</ToggleGroup>
</section>
);
}

View File

@@ -1,8 +1,30 @@
.right-container {
	display: flex;
	flex-direction: column;
	font-family: 'Space Mono';
	padding-bottom: 48px;
	.section-heading {
		color: var(--bg-vanilla-400);
		font-size: 13px;
		font-style: normal;
		font-weight: 400;
		line-height: 18px; /* 138.462% */
		letter-spacing: 0.52px;
		text-transform: uppercase;
	}
	.section-heading-small {
		font-family: 'Space Mono';
		color: var(--bg-vanilla-400);
		font-size: 12px;
		font-style: normal;
		font-weight: 400;
		word-break: initial;
		line-height: 16px; /* 133.333% */
		letter-spacing: 0.48px;
	}
	.header {
		display: flex;
		padding: 14px 14px 14px 12px;
@@ -34,17 +56,6 @@
	.name-description {
		padding: 0 0 4px 0;
-		.typography {
-			color: var(--bg-vanilla-400);
-			font-family: 'Space Mono';
-			font-size: 13px;
-			font-style: normal;
-			font-weight: 400;
-			line-height: 18px; /* 138.462% */
-			letter-spacing: 0.52px;
-			text-transform: uppercase;
-		}
		.name-input {
			display: flex;
			padding: 6px 6px 6px 8px;
@@ -90,15 +101,21 @@
		display: flex;
		flex-direction: column;
-		.typography {
-			color: var(--bg-vanilla-400);
-			font-family: 'Space Mono';
-			font-size: 13px;
-			font-style: normal;
-			font-weight: 400;
-			line-height: 18px; /* 138.462% */
-			letter-spacing: 0.52px;
-			text-transform: uppercase;
-		}
		.toggle-card {
			display: flex;
			flex-direction: row;
			justify-content: space-between;
			align-items: center;
			padding: 12px;
			border-radius: 2px;
			border: 1px solid var(--bg-slate-400);
			background: var(--bg-ink-400);
		}
		.toggle-card-text-container {
			display: flex;
			flex-direction: column;
			gap: 4px;
		}
		.panel-type-select {
@@ -114,55 +131,16 @@
			border: 1px solid var(--bg-slate-400);
			background: var(--bg-ink-300);
		}
-		.select-option {
-			display: flex;
-			align-items: center;
-			gap: 6px;
-			.icon {
-				display: flex;
-				align-items: center;
-			}
-			.display {
-				color: var(--bg-vanilla-100);
-				font-family: Inter;
-				font-size: 12px;
-				font-style: normal;
-				font-weight: 400;
-				line-height: 16px; /* 133.333% */
-			}
-		}
	}
-	.fill-gaps {
-		display: flex;
-		padding: 12px;
-		justify-content: space-between;
-		align-items: center;
-		border-radius: 2px;
-		border: 1px solid var(--bg-slate-400);
-		background: var(--bg-ink-400);
-		.fill-gaps-text {
-			color: var(--bg-vanilla-400);
-			font-family: 'Space Mono';
-			font-size: 13px;
-			font-style: normal;
-			font-weight: 400;
-			line-height: 18px; /* 138.462% */
-			letter-spacing: 0.52px;
-			text-transform: uppercase;
-		}
-		.fill-gaps-text-description {
-			color: var(--bg-vanilla-400);
-			font-family: Inter;
-			font-size: 12px;
-			font-style: normal;
-			font-weight: 400;
-			opacity: 0.6;
-			line-height: 16px; /* 133.333% */
-		}
-	}
	.toggle-card-description {
		color: var(--bg-vanilla-400);
		font-family: Inter;
		font-size: 12px;
		font-style: normal;
		font-weight: 400;
		opacity: 0.6;
		line-height: 16px; /* 133.333% */
	}
	.log-scale,
@@ -171,17 +149,6 @@
		justify-content: space-between;
	}
-	.panel-time-text {
-		color: var(--bg-vanilla-400);
-		font-family: 'Space Mono';
-		font-size: 13px;
-		font-style: normal;
-		font-weight: 400;
-		line-height: 18px; /* 138.462% */
-		letter-spacing: 0.52px;
-		text-transform: uppercase;
-	}
	.y-axis-unit-selector,
	.y-axis-unit-selector-v2 {
		display: flex;
@@ -189,14 +156,7 @@
		gap: 8px;
		.heading {
-			color: var(--bg-vanilla-400);
-			font-family: 'Space Mono';
-			font-size: 13px;
-			font-style: normal;
-			font-weight: 400;
-			line-height: 18px; /* 138.462% */
-			letter-spacing: 0.52px;
-			text-transform: uppercase;
			@extend .section-heading;
		}
		.input {
@@ -249,7 +209,6 @@
		.text {
			color: var(--bg-vanilla-400);
-			font-family: 'Space Mono';
			font-size: 12px;
			font-style: normal;
			font-weight: 400;
@@ -270,30 +229,9 @@
	.stack-chart {
		flex-direction: row;
		justify-content: space-between;
-		.label {
-			color: var(--bg-vanilla-400);
-			font-family: 'Space Mono';
-			font-size: 13px;
-			font-style: normal;
-			font-weight: 400;
-			line-height: 18px; /* 138.462% */
-			letter-spacing: 0.52px;
-			text-transform: uppercase;
-		}
	}
	.bucket-config {
-		.label {
-			color: var(--bg-vanilla-400);
-			font-family: 'Space Mono';
-			font-size: 13px;
-			font-style: normal;
-			font-weight: 400;
-			line-height: 18px; /* 138.462% */
-			letter-spacing: 0.52px;
-			text-transform: uppercase;
-		}
		.bucket-size-label {
			margin-top: 8px;
		}
@@ -322,7 +260,6 @@
		.label {
			color: var(--bg-vanilla-400);
-			font-family: 'Space Mono';
			font-size: 13px;
			font-style: normal;
			font-weight: 400;
@@ -354,7 +291,6 @@
		.alerts-text {
			color: var(--bg-vanilla-400);
-			font-family: Inter;
			font-size: 14px;
			font-style: normal;
			font-weight: 400;
@@ -400,6 +336,9 @@
.lightMode {
	.right-container {
		background-color: var(--bg-vanilla-100);
		.section-heading {
			color: var(--bg-ink-400);
		}
		.header {
			.header-text {
				color: var(--bg-ink-400);
@@ -427,10 +366,6 @@
		}
		.panel-config {
-			.typography {
-				color: var(--bg-ink-400);
-			}
			.panel-type-select {
				.ant-select-selector {
					border: 1px solid var(--bg-vanilla-300);
@@ -455,14 +390,14 @@
				}
			}
-			.fill-gaps {
			.toggle-card {
				border: 1px solid var(--bg-vanilla-300);
				background: var(--bg-vanilla-300);
				.fill-gaps-text {
					color: var(--bg-ink-400);
				}
-				.fill-gaps-text-description {
				.toggle-card-description {
					color: var(--bg-ink-400);
				}
			}

View File

@@ -6,6 +6,11 @@ import { MemoryRouter } from 'react-router-dom';
import { render as rtlRender, screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { PANEL_TYPES } from 'constants/queryBuilder';
import {
	FillMode,
	LineInterpolation,
	LineStyle,
} from 'lib/uPlotV2/config/types';
import { AppContext } from 'providers/App/App';
import { IAppContext } from 'providers/App/types';
import { ErrorModalProvider } from 'providers/ErrorModalProvider';
@@ -165,6 +170,14 @@ describe('RightContainer - Alerts Section', () => {
		setContextLinks: jest.fn(),
		enableDrillDown: false,
		isNewDashboard: false,
		lineInterpolation: LineInterpolation.Spline,
		fillMode: FillMode.None,
		lineStyle: LineStyle.Solid,
		setLineInterpolation: jest.fn(),
		setFillMode: jest.fn(),
		setLineStyle: jest.fn(),
		showPoints: false,
		setShowPoints: jest.fn(),
	};
	beforeEach(() => {

View File

@@ -43,7 +43,7 @@
	max-height: 0;
	overflow: hidden;
	opacity: 0;
-	transition: max-height 0.25s ease, opacity 0.25s ease, padding 0.25s ease;
	transition: max-height 0.1s ease, opacity 0.1s ease, padding 0.1s ease;
	&.open {
		padding-bottom: 24px;

View File

@@ -206,3 +206,59 @@ export const panelTypeVsDecimalPrecision: {
	[PANEL_TYPES.TRACE]: false,
	[PANEL_TYPES.EMPTY_WIDGET]: false,
} as const;
export const panelTypeVsLineInterpolation: {
[key in PANEL_TYPES]: boolean;
} = {
[PANEL_TYPES.TIME_SERIES]: true,
[PANEL_TYPES.VALUE]: false,
[PANEL_TYPES.TABLE]: false,
[PANEL_TYPES.LIST]: false,
[PANEL_TYPES.PIE]: false,
[PANEL_TYPES.BAR]: false,
[PANEL_TYPES.HISTOGRAM]: false,
[PANEL_TYPES.TRACE]: false,
[PANEL_TYPES.EMPTY_WIDGET]: false,
} as const;
export const panelTypeVsLineStyle: {
[key in PANEL_TYPES]: boolean;
} = {
[PANEL_TYPES.TIME_SERIES]: true,
[PANEL_TYPES.VALUE]: false,
[PANEL_TYPES.TABLE]: false,
[PANEL_TYPES.LIST]: false,
[PANEL_TYPES.PIE]: false,
[PANEL_TYPES.BAR]: false,
[PANEL_TYPES.HISTOGRAM]: false,
[PANEL_TYPES.TRACE]: false,
[PANEL_TYPES.EMPTY_WIDGET]: false,
} as const;
export const panelTypeVsFillMode: {
[key in PANEL_TYPES]: boolean;
} = {
[PANEL_TYPES.TIME_SERIES]: true,
[PANEL_TYPES.VALUE]: false,
[PANEL_TYPES.TABLE]: false,
[PANEL_TYPES.LIST]: false,
[PANEL_TYPES.PIE]: false,
[PANEL_TYPES.BAR]: false,
[PANEL_TYPES.HISTOGRAM]: false,
[PANEL_TYPES.TRACE]: false,
[PANEL_TYPES.EMPTY_WIDGET]: false,
} as const;
export const panelTypeVsShowPoints: {
[key in PANEL_TYPES]: boolean;
} = {
[PANEL_TYPES.TIME_SERIES]: true,
[PANEL_TYPES.VALUE]: false,
[PANEL_TYPES.TABLE]: false,
[PANEL_TYPES.LIST]: false,
[PANEL_TYPES.PIE]: false,
[PANEL_TYPES.BAR]: false,
[PANEL_TYPES.HISTOGRAM]: false,
[PANEL_TYPES.TRACE]: false,
[PANEL_TYPES.EMPTY_WIDGET]: false,
} as const;

View File

@@ -20,12 +20,18 @@ import {
import { PrecisionOption, PrecisionOptionsEnum } from 'components/Graph/types';
import TimePreference from 'components/TimePreferenceDropDown';
import { PANEL_TYPES, PanelDisplay } from 'constants/queryBuilder';
-import GraphTypes, {
import {
	ItemsProps,
-} from 'container/DashboardContainer/ComponentsSlider/menuItems';
	PanelTypesWithData,
} from 'container/DashboardContainer/PanelTypeSelectionModal/menuItems';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import useCreateAlerts from 'hooks/queryBuilder/useCreateAlerts';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import {
	FillMode,
	LineInterpolation,
	LineStyle,
} from 'lib/uPlotV2/config/types';
import {
	Antenna,
	Axis3D,
@@ -34,6 +40,7 @@ import {
	LayoutDashboard,
	LineChart,
	Link,
	Paintbrush,
	Pencil,
	Plus,
	SlidersHorizontal,
@@ -59,11 +66,15 @@ import {
	panelTypeVsContextLinks,
	panelTypeVsCreateAlert,
	panelTypeVsDecimalPrecision,
	panelTypeVsFillMode,
	panelTypeVsFillSpan,
	panelTypeVsLegendColors,
	panelTypeVsLegendPosition,
	panelTypeVsLineInterpolation,
	panelTypeVsLineStyle,
	panelTypeVsLogScale,
	panelTypeVsPanelTimePreferences,
	panelTypeVsShowPoints,
	panelTypeVsSoftMinMax,
	panelTypeVsStackingChartPreferences,
	panelTypeVsThreshold,
@@ -71,7 +82,10 @@ import {
} from './constants';
import ContextLinks from './ContextLinks';
import DashboardYAxisUnitSelectorWrapper from './DashboardYAxisUnitSelectorWrapper';
import { FillModeSelector } from './FillModeSelector';
import LegendColors from './LegendColors/LegendColors';
import { LineInterpolationSelector } from './LineInterpolationSelector';
import { LineStyleSelector } from './LineStyleSelector';
import ThresholdSelector from './Threshold/ThresholdSelector';
import { ThresholdProps } from './Threshold/types';
import { timePreferance } from './timeItems';
@@ -98,6 +112,14 @@ function RightContainer({
	setTitle,
	title,
	selectedGraph,
	lineInterpolation,
	setLineInterpolation,
	fillMode,
	setFillMode,
	lineStyle,
	setLineStyle,
	showPoints,
	setShowPoints,
	bucketCount,
	bucketWidth,
	stackedBarChart,
@@ -150,7 +172,7 @@ function RightContainer({
	);
	const selectedGraphType =
-		GraphTypes.find((e) => e.name === selectedGraph)?.display || '';
		PanelTypesWithData.find((e) => e.name === selectedGraph)?.display || '';
	const onCreateAlertsHandler = useCreateAlerts(selectedWidget, 'panelView');
@@ -174,9 +196,14 @@ function RightContainer({
		panelTypeVsContextLinks[selectedGraph] && enableDrillDown;
	const allowDecimalPrecision = panelTypeVsDecimalPrecision[selectedGraph];
	const allowLineInterpolation = panelTypeVsLineInterpolation[selectedGraph];
	const allowLineStyle = panelTypeVsLineStyle[selectedGraph];
	const allowFillMode = panelTypeVsFillMode[selectedGraph];
	const allowShowPoints = panelTypeVsShowPoints[selectedGraph];
	const { currentQuery } = useQueryBuilder();
-	const [graphTypes, setGraphTypes] = useState<ItemsProps[]>(GraphTypes);
	const [graphTypes, setGraphTypes] = useState<ItemsProps[]>(PanelTypesWithData);
	const dashboardVariableOptions = useMemo<VariableOption[]>(() => {
		return Object.entries(dashboardVariables).map(([, value]) => ({
@@ -200,6 +227,22 @@ function RightContainer({
		[allowLegendPosition, allowLegendColors],
	);
	const isChartAppearanceSectionVisible = useMemo(
		() =>
			/**
			 * Disabled for now as we are not done with other settings in chart appearance section
			 * TODO: @ahrefabhi Enable this after we are done other settings in chart appearance section
			 */
			// eslint-disable-next-line sonarjs/no-redundant-boolean
			false &&
			(allowFillMode ||
				allowLineStyle ||
				allowLineInterpolation ||
				allowShowPoints),
		[allowFillMode, allowLineStyle, allowLineInterpolation, allowShowPoints],
	);
	const updateCursorAndDropdown = (value: string, pos: number): void => {
		setCursorPos(pos);
		const lastDollar = value.lastIndexOf('$', pos - 1);
@@ -272,7 +315,7 @@ function RightContainer({
				prev.filter((graph) => graph.name !== PANEL_TYPES.LIST),
			);
		} else {
-			setGraphTypes(GraphTypes);
			setGraphTypes(PanelTypesWithData);
		}
	}, [currentQuery]);
@@ -299,7 +342,7 @@ function RightContainer({
		<SettingsSection title="General" defaultOpen icon={<Pencil size={14} />}>
			<section className="name-description control-container">
-				<Typography.Text className="typography">Name</Typography.Text>
				<Typography.Text className="section-heading">Name</Typography.Text>
				<AutoComplete
					options={dashboardVariableOptions}
					value={inputValue}
@@ -319,7 +362,7 @@ function RightContainer({
						onBlur={(): void => setAutoCompleteOpen(false)}
					/>
				</AutoComplete>
-				<Typography.Text className="typography">Description</Typography.Text>
				<Typography.Text className="section-heading">Description</Typography.Text>
				<TextArea
					placeholder="Enter the panel description here..."
					bordered
@@ -340,7 +383,7 @@ function RightContainer({
			icon={<LayoutDashboard size={14} />}
		>
			<section className="panel-type control-container">
-				<Typography.Text className="typography">Panel Type</Typography.Text>
				<Typography.Text className="section-heading">Panel Type</Typography.Text>
				<Select
					onChange={setGraphHandler}
					value={selectedGraph}
@@ -361,7 +404,7 @@ function RightContainer({
			{allowPanelTimePreference && (
				<section className="panel-time-preference control-container">
-					<Typography.Text className="panel-time-text">
					<Typography.Text className="section-heading">
						Panel Time Preference
					</Typography.Text>
					<TimePreference
@@ -375,7 +418,9 @@ function RightContainer({
			{allowStackingBarChart && (
				<section className="stack-chart control-container">
-					<Typography.Text className="label">Stack series</Typography.Text>
					<Typography.Text className="section-heading">
						Stack series
					</Typography.Text>
					<Switch
						checked={stackedBarChart}
						size="small"
@@ -385,10 +430,10 @@ function RightContainer({
			)}
			{allowFillSpans && (
-				<section className="fill-gaps">
				<section className="fill-gaps toggle-card">
-					<div className="fill-gaps-text-container">
					<div className="toggle-card-text-container">
-						<Typography className="fill-gaps-text">Fill gaps</Typography>
						<Typography className="section-heading">Fill gaps</Typography>
-						<Typography.Text className="fill-gaps-text-description">
						<Typography.Text className="toggle-card-description">
							Fill gaps in data with 0 for continuity
						</Typography.Text>
					</div>
@@ -447,6 +492,36 @@ function RightContainer({
				</SettingsSection>
			)}
			{isChartAppearanceSectionVisible && (
				<SettingsSection title="Chart Appearance" icon={<Paintbrush size={14} />}>
					{allowFillMode && (
						<FillModeSelector value={fillMode} onChange={setFillMode} />
					)}
					{allowLineStyle && (
						<LineStyleSelector value={lineStyle} onChange={setLineStyle} />
					)}
					{allowLineInterpolation && (
						<LineInterpolationSelector
							value={lineInterpolation}
							onChange={setLineInterpolation}
						/>
					)}
					{allowShowPoints && (
						<section className="show-points toggle-card">
							<div className="toggle-card-text-container">
								<Typography.Text className="section-heading">
									Show points
								</Typography.Text>
								<Typography.Text className="toggle-card-description">
									Display individual data points on the chart
								</Typography.Text>
							</div>
							<Switch size="small" checked={showPoints} onChange={setShowPoints} />
						</section>
					)}
				</SettingsSection>
			)}
			{isAxisSectionVisible && (
				<SettingsSection title="Axes" icon={<Axis3D size={14} />}>
					{allowSoftMinMax && (
@@ -474,7 +549,9 @@ function RightContainer({
					{allowLogScale && (
						<section className="log-scale control-container">
-							<Typography.Text className="typography">Y Axis Scale</Typography.Text>
							<Typography.Text className="section-heading">
								Y Axis Scale
							</Typography.Text>
							<Select
								onChange={(value): void =>
									setIsLogScale(value === LogScale.LOGARITHMIC)
@@ -510,7 +587,7 @@ function RightContainer({
				<SettingsSection title="Legend" icon={<Layers size={14} />}>
					{allowLegendPosition && (
						<section className="legend-position control-container">
-							<Typography.Text className="typography">Position</Typography.Text>
							<Typography.Text className="section-heading">Position</Typography.Text>
							<Select
								onChange={(value: LegendPosition): void => setLegendPosition(value)}
								value={legendPosition}
@@ -547,7 +624,9 @@ function RightContainer({
			{allowBucketConfig && (
				<SettingsSection title="Histogram / Buckets">
					<section className="bucket-config control-container">
-						<Typography.Text className="label">Number of buckets</Typography.Text>
						<Typography.Text className="section-heading">
							Number of buckets
						</Typography.Text>
						<InputNumber
							value={bucketCount || null}
							type="number"
@@ -558,7 +637,7 @@ function RightContainer({
								setBucketCount(val || 0);
							}}
						/>
-						<Typography.Text className="label bucket-size-label">
						<Typography.Text className="section-heading bucket-size-label">
							Bucket width
						</Typography.Text>
						<InputNumber
@@ -574,7 +653,7 @@ function RightContainer({
							}}
						/>
						<section className="combine-hist">
-							<Typography.Text className="label">
							<Typography.Text className="section-heading">
								Merge all series into one
							</Typography.Text>
							<Switch
@@ -682,6 +761,14 @@ export interface RightContainerProps {
	setContextLinks: Dispatch<SetStateAction<ContextLinksData>>;
	enableDrillDown?: boolean;
	isNewDashboard: boolean;
	lineInterpolation: LineInterpolation;
	setLineInterpolation: Dispatch<SetStateAction<LineInterpolation>>;
	fillMode: FillMode;
	setFillMode: Dispatch<SetStateAction<FillMode>>;
	lineStyle: LineStyle;
	setLineStyle: Dispatch<SetStateAction<LineStyle>>;
	showPoints: boolean;
	setShowPoints: Dispatch<SetStateAction<boolean>>;
}
RightContainer.defaultProps = {

View File

@@ -6,6 +6,11 @@ import { UseQueryResult } from 'react-query';
import { useSelector } from 'react-redux';
import { generatePath } from 'react-router-dom';
import { WarningOutlined } from '@ant-design/icons';
import {
	ResizableHandle,
	ResizablePanel,
	ResizablePanelGroup,
} from '@signozhq/resizable';
import { Button, Flex, Modal, Space, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { PrecisionOption, PrecisionOptionsEnum } from 'components/Graph/types';
@@ -24,12 +29,16 @@ import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
-import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import createQueryParams from 'lib/createQueryParams';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import {
	FillMode,
	LineInterpolation,
	LineStyle,
} from 'lib/uPlotV2/config/types';
import { cloneDeep, defaultTo, isEmpty, isUndefined } from 'lodash-es';
import { Check, X } from 'lucide-react';
import { useScrollToWidgetIdStore } from 'providers/Dashboard/helpers/scrollToWidgetIdHelper';
@@ -63,12 +72,7 @@ import QueryTypeTag from './LeftContainer/QueryTypeTag';
import RightContainer from './RightContainer';
import { ThresholdProps } from './RightContainer/Threshold/types';
import TimeItems, { timePreferance } from './RightContainer/timeItems';
-import {
-	Container,
-	LeftContainerWrapper,
-	PanelContainer,
-	RightContainerWrapper,
-} from './styles';
import { Container, PanelContainer } from './styles';
import { NewWidgetProps } from './types';
import {
	getDefaultWidgetData,
@@ -204,6 +208,18 @@ function NewWidget({
	const [legendPosition, setLegendPosition] = useState<LegendPosition>(
		selectedWidget?.legendPosition || LegendPosition.BOTTOM,
	);
	const [lineInterpolation, setLineInterpolation] = useState<LineInterpolation>(
		selectedWidget?.lineInterpolation || LineInterpolation.Spline,
	);
	const [fillMode, setFillMode] = useState<FillMode>(
		selectedWidget?.fillMode || FillMode.None,
	);
	const [lineStyle, setLineStyle] = useState<LineStyle>(
		selectedWidget?.lineStyle || LineStyle.Solid,
	);
	const [showPoints, setShowPoints] = useState<boolean>(
		selectedWidget?.showPoints ?? false,
	);
	const [customLegendColors, setCustomLegendColors] = useState<
		Record<string, string>
	>(selectedWidget?.customLegendColors || {});
@@ -269,6 +285,10 @@ function NewWidget({
		softMin,
		softMax,
		fillSpans: isFillSpans,
		lineInterpolation,
		fillMode,
		lineStyle,
		showPoints,
		columnUnits,
		bucketCount,
		stackedBarChart,
@@ -304,6 +324,10 @@ function NewWidget({
		stackedBarChart,
		isLogScale,
		legendPosition,
		lineInterpolation,
		fillMode,
		lineStyle,
		showPoints,
		customLegendColors,
		contextLinks,
		selectedWidget.columnWidths,
@@ -757,7 +781,7 @@ function NewWidget({
	}, [query, safeNavigate, dashboardId, currentQuery]);
	return (
-		<Container>
		<Container className="new-widget-container">
			<div className="edit-header">
				<div className="left-header">
					<X
@@ -811,79 +835,102 @@ function NewWidget({
			</div>
			<PanelContainer>
-				<LeftContainerWrapper isDarkMode={useIsDarkMode()}>
-					<OverlayScrollbar>
-						{selectedWidget && (
-							<LeftContainer
-								selectedGraph={graphType}
-								selectedLogFields={selectedLogFields}
-								setSelectedLogFields={setSelectedLogFields}
-								selectedTracesFields={selectedTracesFields}
-								setSelectedTracesFields={setSelectedTracesFields}
-								selectedWidget={selectedWidget}
-								selectedTime={selectedTime}
-								requestData={requestData}
-								setRequestData={setRequestData}
-								isLoadingPanelData={isLoadingPanelData}
-								setQueryResponse={setQueryResponse}
-								enableDrillDown={enableDrillDown}
-								selectedDashboard={selectedDashboard}
-								isNewPanel={isNewPanel}
				<ResizablePanelGroup
					direction="horizontal"
					className="widget-resizable-panel-group"
					autoSaveId="panel-editor"
				>
					<ResizablePanel
						minSize={70}
						maxSize={80}
						defaultSize={80}
						className="resizable-panel-left-container"
					>
						<OverlayScrollbar>
							{selectedWidget && (
								<LeftContainer
									selectedDashboard={selectedDashboard}
									selectedGraph={graphType}
									selectedLogFields={selectedLogFields}
									setSelectedLogFields={setSelectedLogFields}
/> selectedTracesFields={selectedTracesFields}
)} setSelectedTracesFields={setSelectedTracesFields}
</OverlayScrollbar> selectedWidget={selectedWidget}
</LeftContainerWrapper> selectedTime={selectedTime}
requestData={requestData}
<RightContainerWrapper> setRequestData={setRequestData}
<RightContainer isLoadingPanelData={isLoadingPanelData}
setGraphHandler={setGraphHandler} setQueryResponse={setQueryResponse}
title={title} enableDrillDown={enableDrillDown}
setTitle={setTitle} />
description={description} )}
setDescription={setDescription} </OverlayScrollbar>
stackedBarChart={stackedBarChart} </ResizablePanel>
setStackedBarChart={setStackedBarChart} <ResizableHandle withHandle className="widget-resizable-handle" />
opacity={opacity} <ResizablePanel
yAxisUnit={yAxisUnit} minSize={20}
columnUnits={columnUnits} maxSize={30}
setColumnUnits={setColumnUnits} defaultSize={20}
bucketCount={bucketCount} className="resizable-panel-right-container"
bucketWidth={bucketWidth} >
combineHistogram={combineHistogram} <RightContainer
setCombineHistogram={setCombineHistogram} setGraphHandler={setGraphHandler}
setBucketWidth={setBucketWidth} title={title}
setBucketCount={setBucketCount} setTitle={setTitle}
setOpacity={setOpacity} description={description}
selectedNullZeroValue={selectedNullZeroValue} setDescription={setDescription}
setSelectedNullZeroValue={setSelectedNullZeroValue} stackedBarChart={stackedBarChart}
selectedGraph={graphType} setStackedBarChart={setStackedBarChart}
setSelectedTime={setSelectedTime} lineInterpolation={lineInterpolation}
selectedTime={selectedTime} setLineInterpolation={setLineInterpolation}
setYAxisUnit={setYAxisUnit} fillMode={fillMode}
decimalPrecision={decimalPrecision} setFillMode={setFillMode}
setDecimalPrecision={setDecimalPrecision} lineStyle={lineStyle}
thresholds={thresholds} setLineStyle={setLineStyle}
setThresholds={setThresholds} showPoints={showPoints}
selectedWidget={selectedWidget} setShowPoints={setShowPoints}
isFillSpans={isFillSpans} opacity={opacity}
setIsFillSpans={setIsFillSpans} yAxisUnit={yAxisUnit}
isLogScale={isLogScale} columnUnits={columnUnits}
setIsLogScale={setIsLogScale} setColumnUnits={setColumnUnits}
legendPosition={legendPosition} bucketCount={bucketCount}
setLegendPosition={setLegendPosition} bucketWidth={bucketWidth}
customLegendColors={customLegendColors} combineHistogram={combineHistogram}
setCustomLegendColors={setCustomLegendColors} setCombineHistogram={setCombineHistogram}
queryResponse={queryResponse} setBucketWidth={setBucketWidth}
softMin={softMin} setBucketCount={setBucketCount}
setSoftMin={setSoftMin} setOpacity={setOpacity}
softMax={softMax} selectedNullZeroValue={selectedNullZeroValue}
setSoftMax={setSoftMax} setSelectedNullZeroValue={setSelectedNullZeroValue}
contextLinks={contextLinks} selectedGraph={graphType}
setContextLinks={setContextLinks} setSelectedTime={setSelectedTime}
enableDrillDown={enableDrillDown} selectedTime={selectedTime}
isNewDashboard={isNewDashboard} setYAxisUnit={setYAxisUnit}
/> decimalPrecision={decimalPrecision}
</RightContainerWrapper> setDecimalPrecision={setDecimalPrecision}
thresholds={thresholds}
setThresholds={setThresholds}
selectedWidget={selectedWidget}
isFillSpans={isFillSpans}
setIsFillSpans={setIsFillSpans}
isLogScale={isLogScale}
setIsLogScale={setIsLogScale}
legendPosition={legendPosition}
setLegendPosition={setLegendPosition}
customLegendColors={customLegendColors}
setCustomLegendColors={setCustomLegendColors}
queryResponse={queryResponse}
softMin={softMin}
setSoftMin={setSoftMin}
softMax={softMax}
setSoftMax={setSoftMax}
contextLinks={contextLinks}
setContextLinks={setContextLinks}
enableDrillDown={enableDrillDown}
isNewDashboard={isNewDashboard}
/>
</ResizablePanel>
</ResizablePanelGroup>
</PanelContainer> </PanelContainer>
<Modal <Modal
title={ title={


@@ -1,4 +1,4 @@
import { Col, Tag as AntDTag } from 'antd'; import { Tag as AntDTag } from 'antd';
import styled from 'styled-components'; import styled from 'styled-components';
export const Container = styled.div` export const Container = styled.div`
@@ -8,42 +8,6 @@ export const Container = styled.div`
overflow-y: hidden; overflow-y: hidden;
`; `;
export const RightContainerWrapper = styled(Col)`
&&& {
max-width: 400px;
width: 30%;
overflow-y: auto;
}
&::-webkit-scrollbar {
width: 0.3rem;
}
&::-webkit-scrollbar-thumb {
background: rgb(136, 136, 136);
border-radius: 0.625rem;
}
&::-webkit-scrollbar-track {
background: transparent;
}
`;
interface LeftContainerWrapperProps {
isDarkMode: boolean;
}
export const LeftContainerWrapper = styled(Col)<LeftContainerWrapperProps>`
&&& {
width: 100%;
overflow-y: auto;
border-right: ${({ isDarkMode }): string =>
isDarkMode
? '1px solid var(--bg-slate-300)'
: '1px solid var(--bg-vanilla-300)'};
}
&::-webkit-scrollbar {
width: 0rem;
}
`;
export const ButtonContainer = styled.div` export const ButtonContainer = styled.div`
display: flex; display: flex;
gap: 8px; gap: 8px;


@@ -11,11 +11,8 @@ import { getYAxisCategories } from 'components/YAxisUnitSelector/utils';
import { import {
initialQueryBuilderFormValuesMap, initialQueryBuilderFormValuesMap,
PANEL_TYPES, PANEL_TYPES,
} from 'constants/queryBuilder';
import {
listViewInitialLogQuery,
PANEL_TYPES_INITIAL_QUERY, PANEL_TYPES_INITIAL_QUERY,
} from 'container/DashboardContainer/ComponentsSlider/constants'; } from 'constants/queryBuilder';
import { import {
defaultLogsSelectedColumns, defaultLogsSelectedColumns,
defaultTraceSelectedColumns, defaultTraceSelectedColumns,
@@ -549,10 +546,7 @@ export const getDefaultWidgetData = (
nullZeroValues: '', nullZeroValues: '',
opacity: '', opacity: '',
panelTypes: name, panelTypes: name,
query: query: PANEL_TYPES_INITIAL_QUERY[name],
name === PANEL_TYPES.LIST
? listViewInitialLogQuery
: PANEL_TYPES_INITIAL_QUERY[name],
timePreferance: 'GLOBAL_TIME', timePreferance: 'GLOBAL_TIME',
softMax: null, softMax: null,
softMin: null, softMin: null,


@@ -12,6 +12,8 @@ import {
ATTRIBUTE_TYPES, ATTRIBUTE_TYPES,
initialAutocompleteData, initialAutocompleteData,
initialQueryBuilderFormValuesMap, initialQueryBuilderFormValuesMap,
listViewInitialLogQuery,
listViewInitialTraceQuery,
mapOfFormulaToFilters, mapOfFormulaToFilters,
mapOfQueryFilters, mapOfQueryFilters,
PANEL_TYPES, PANEL_TYPES,
@@ -23,10 +25,6 @@ import {
metricsUnknownSpaceAggregateOperatorOptions, metricsUnknownSpaceAggregateOperatorOptions,
metricsUnknownTimeAggregateOperatorOptions, metricsUnknownTimeAggregateOperatorOptions,
} from 'constants/queryBuilderOperators'; } from 'constants/queryBuilderOperators';
import {
listViewInitialLogQuery,
listViewInitialTraceQuery,
} from 'container/DashboardContainer/ComponentsSlider/constants';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { getMetricsOperatorsByAttributeType } from 'lib/newQueryBuilder/getMetricsOperatorsByAttributeType'; import { getMetricsOperatorsByAttributeType } from 'lib/newQueryBuilder/getMetricsOperatorsByAttributeType';
import { getOperatorsBySourceAndPanelType } from 'lib/newQueryBuilder/getOperatorsBySourceAndPanelType'; import { getOperatorsBySourceAndPanelType } from 'lib/newQueryBuilder/getOperatorsBySourceAndPanelType';


@@ -3,14 +3,15 @@ import { generateColor } from 'lib/uPlotLib/utils/generateColor';
import { calculateWidthBasedOnStepInterval } from 'lib/uPlotV2/utils'; import { calculateWidthBasedOnStepInterval } from 'lib/uPlotV2/utils';
import uPlot, { Series } from 'uplot'; import uPlot, { Series } from 'uplot';
import { generateGradientFill } from '../utils/generateGradientFill';
import { import {
BarAlignment, BarAlignment,
ConfigBuilder, ConfigBuilder,
DrawStyle, DrawStyle,
FillMode,
LineInterpolation, LineInterpolation,
LineStyle, LineStyle,
SeriesProps, SeriesProps,
VisibilityMode,
} from './types'; } from './types';
/** /**
@@ -52,7 +53,7 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
}: { }: {
resolvedLineColor: string; resolvedLineColor: string;
}): Partial<Series> { }): Partial<Series> {
const { lineWidth, lineStyle, lineCap, fillColor } = this.props; const { lineWidth, lineStyle, lineCap, fillColor, fillMode } = this.props;
const lineConfig: Partial<Series> = { const lineConfig: Partial<Series> = {
stroke: resolvedLineColor, stroke: resolvedLineColor,
width: lineWidth ?? DEFAULT_LINE_WIDTH, width: lineWidth ?? DEFAULT_LINE_WIDTH,
@@ -66,12 +67,26 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
lineConfig.cap = lineCap; lineConfig.cap = lineCap;
} }
if (fillColor) { /**
lineConfig.fill = fillColor; * Configure area fill based on draw style and fill mode:
} else if (this.props.drawStyle === DrawStyle.Bar) { * - bar charts always use a solid fill with the series color
lineConfig.fill = resolvedLineColor; * - histogram uses the same color with a fixed alpha suffix for translucency
* - for other series, an explicit fillMode controls whether we use a solid fill
* or a vertical gradient from the series color to transparent
*/
const finalFillColor = fillColor ?? resolvedLineColor;
if (this.props.drawStyle === DrawStyle.Bar) {
lineConfig.fill = finalFillColor;
} else if (this.props.drawStyle === DrawStyle.Histogram) { } else if (this.props.drawStyle === DrawStyle.Histogram) {
lineConfig.fill = `${resolvedLineColor}40`; lineConfig.fill = `${finalFillColor}40`;
} else if (fillMode && fillMode !== FillMode.None) {
if (fillMode === FillMode.Solid) {
lineConfig.fill = finalFillColor;
} else if (fillMode === FillMode.Gradient) {
lineConfig.fill = (self: uPlot): CanvasGradient =>
generateGradientFill(self, finalFillColor, 'rgba(0, 0, 0, 0)');
}
} }
return lineConfig; return lineConfig;
@@ -159,12 +174,8 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
pointsConfig.show = pointsBuilder; pointsConfig.show = pointsBuilder;
} else if (drawStyle === DrawStyle.Points) { } else if (drawStyle === DrawStyle.Points) {
pointsConfig.show = true; pointsConfig.show = true;
} else if (showPoints === VisibilityMode.Never) {
pointsConfig.show = false;
} else if (showPoints === VisibilityMode.Always) {
pointsConfig.show = true;
} else { } else {
pointsConfig.show = false; // default to hidden pointsConfig.show = !!showPoints;
} }
return pointsConfig; return pointsConfig;
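
Read on its own, the fill precedence described in the new comment above (bar, then histogram, then an explicit fillMode, otherwise no fill) can be sketched as a small standalone helper. This is an illustrative TypeScript sketch only, not code from the diff; the enums mirror DrawStyle/FillMode from lib/uPlotV2/config/types, and resolveFill itself is hypothetical.

// Hypothetical sketch of the fill precedence described in the comment above.
enum DrawStyle { Line = 'line', Bar = 'bar', Histogram = 'histogram', Points = 'points' }
enum FillMode { Solid = 'solid', Gradient = 'gradient', None = 'none' }

type Fill = string | ((self: unknown) => CanvasGradient) | undefined;

function resolveFill(
	drawStyle: DrawStyle,
	fillMode: FillMode | undefined,
	fillColor: string | undefined,
	resolvedLineColor: string,
	gradientFill: (self: unknown, color: string) => CanvasGradient,
): Fill {
	const finalFillColor = fillColor ?? resolvedLineColor;
	if (drawStyle === DrawStyle.Bar) {
		return finalFillColor; // bars: always a solid fill in the series color
	}
	if (drawStyle === DrawStyle.Histogram) {
		return `${finalFillColor}40`; // same color with a fixed alpha suffix
	}
	if (fillMode === FillMode.Solid) {
		return finalFillColor;
	}
	if (fillMode === FillMode.Gradient) {
		// vertical gradient from the series color down to transparent
		return (self: unknown): CanvasGradient => gradientFill(self, finalFillColor);
	}
	return undefined; // FillMode.None or unset: no area fill
}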


@@ -2,12 +2,7 @@ import { themeColors } from 'constants/theme';
import uPlot from 'uplot'; import uPlot from 'uplot';
import type { SeriesProps } from '../types'; import type { SeriesProps } from '../types';
import { import { DrawStyle, LineInterpolation, LineStyle } from '../types';
DrawStyle,
LineInterpolation,
LineStyle,
VisibilityMode,
} from '../types';
import { POINT_SIZE_FACTOR, UPlotSeriesBuilder } from '../UPlotSeriesBuilder'; import { POINT_SIZE_FACTOR, UPlotSeriesBuilder } from '../UPlotSeriesBuilder';
const createBaseProps = ( const createBaseProps = (
@@ -168,17 +163,17 @@ describe('UPlotSeriesBuilder', () => {
expect(config.points?.show).toBe(pointsBuilder); expect(config.points?.show).toBe(pointsBuilder);
}); });
it('respects VisibilityMode for point visibility when no custom pointsBuilder is given', () => { it('respects showPoints for point visibility when no custom pointsBuilder is given', () => {
const neverPointsBuilder = new UPlotSeriesBuilder( const neverPointsBuilder = new UPlotSeriesBuilder(
createBaseProps({ createBaseProps({
drawStyle: DrawStyle.Line, drawStyle: DrawStyle.Line,
showPoints: VisibilityMode.Never, showPoints: false,
}), }),
); );
const alwaysPointsBuilder = new UPlotSeriesBuilder( const alwaysPointsBuilder = new UPlotSeriesBuilder(
createBaseProps({ createBaseProps({
drawStyle: DrawStyle.Line, drawStyle: DrawStyle.Line,
showPoints: VisibilityMode.Always, showPoints: true,
}), }),
); );


@@ -122,12 +122,6 @@ export enum LineInterpolation {
StepBefore = 'stepBefore', StepBefore = 'stepBefore',
} }
export enum VisibilityMode {
Always = 'always',
Auto = 'auto',
Never = 'never',
}
/** /**
* Props for configuring lines * Props for configuring lines
*/ */
@@ -163,7 +157,13 @@ export interface BarConfig {
export interface PointsConfig { export interface PointsConfig {
pointColor?: string; pointColor?: string;
pointSize?: number; pointSize?: number;
showPoints?: VisibilityMode; showPoints?: boolean;
}
export enum FillMode {
Solid = 'solid',
Gradient = 'gradient',
None = 'none',
} }
export interface SeriesProps extends LineConfig, PointsConfig, BarConfig { export interface SeriesProps extends LineConfig, PointsConfig, BarConfig {
@@ -177,6 +177,7 @@ export interface SeriesProps extends LineConfig, PointsConfig, BarConfig {
show?: boolean; show?: boolean;
spanGaps?: boolean; spanGaps?: boolean;
fillColor?: string; fillColor?: string;
fillMode?: FillMode;
isDarkMode?: boolean; isDarkMode?: boolean;
stepInterval?: number; stepInterval?: number;
} }


@@ -0,0 +1,18 @@
import uPlot from 'uplot';
export function generateGradientFill(
uPlotInstance: uPlot,
startColor: string,
endColor: string,
): CanvasGradient {
const g = uPlotInstance.ctx.createLinearGradient(
0,
0,
0,
uPlotInstance.bbox.height,
);
g.addColorStop(0, `${startColor}70`);
g.addColorStop(0.6, `${startColor}40`);
g.addColorStop(1, endColor);
return g;
}
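
For context, and not part of the diff: a series would typically hand this helper to uPlot as its fill callback, so the gradient is built against the live canvas context and bounding box. The sketch below is hypothetical usage; the absolute import path is an assumption (the diff only shows the builder's relative '../utils/generateGradientFill' import).

// Hypothetical usage sketch; the import path below is an assumption.
import uPlot from 'uplot';
import { generateGradientFill } from 'lib/uPlotV2/config/utils/generateGradientFill';

const series: uPlot.Series = {
	stroke: '#4E74F8',
	// uPlot calls the fill callback with the chart instance once ctx/bbox exist,
	// so the gradient can span the full height of the plotting area.
	fill: (self: uPlot): CanvasGradient =>
		generateGradientFill(self, '#4E74F8', 'rgba(0, 0, 0, 0)'),
};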


@@ -53,9 +53,7 @@ import { IDashboardContext, WidgetColumnWidths } from './types';
import { sortLayout } from './util'; import { sortLayout } from './util';
export const DashboardContext = createContext<IDashboardContext>({ export const DashboardContext = createContext<IDashboardContext>({
isDashboardSliderOpen: false,
isDashboardLocked: false, isDashboardLocked: false,
handleToggleDashboardSlider: () => {},
handleDashboardLockToggle: () => {}, handleDashboardLockToggle: () => {},
dashboardResponse: {} as UseQueryResult< dashboardResponse: {} as UseQueryResult<
SuccessResponseV2<Dashboard>, SuccessResponseV2<Dashboard>,
@@ -82,8 +80,6 @@ export function DashboardProvider({
children, children,
dashboardId, dashboardId,
}: PropsWithChildren<{ dashboardId: string }>): JSX.Element { }: PropsWithChildren<{ dashboardId: string }>): JSX.Element {
const [isDashboardSliderOpen, setIsDashboardSlider] = useState<boolean>(false);
const [isDashboardLocked, setIsDashboardLocked] = useState<boolean>(false); const [isDashboardLocked, setIsDashboardLocked] = useState<boolean>(false);
const [ const [
@@ -288,13 +284,8 @@ export function DashboardProvider({
} }
}, [isVisible]); }, [isVisible]);
const handleToggleDashboardSlider = (value: boolean): void => {
setIsDashboardSlider(value);
};
const { mutate: lockDashboard } = useMutation(locked, { const { mutate: lockDashboard } = useMutation(locked, {
onSuccess: (_, props) => { onSuccess: (_, props) => {
setIsDashboardSlider(false);
setIsDashboardLocked(props.lock); setIsDashboardLocked(props.lock);
}, },
onError: (error) => { onError: (error) => {
@@ -319,9 +310,7 @@ export function DashboardProvider({
const value: IDashboardContext = useMemo( const value: IDashboardContext = useMemo(
() => ({ () => ({
isDashboardSliderOpen,
isDashboardLocked, isDashboardLocked,
handleToggleDashboardSlider,
handleDashboardLockToggle, handleDashboardLockToggle,
dashboardResponse, dashboardResponse,
selectedDashboard, selectedDashboard,
@@ -341,7 +330,6 @@ export function DashboardProvider({
}), }),
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
[ [
isDashboardSliderOpen,
isDashboardLocked, isDashboardLocked,
dashboardResponse, dashboardResponse,
selectedDashboard, selectedDashboard,


@@ -0,0 +1,18 @@
import { create } from 'zustand';
interface IPanelTypeSelectionModalState {
isPanelTypeSelectionModalOpen: boolean;
setIsPanelTypeSelectionModalOpen: (isOpen: boolean) => void;
}
/**
* This helper is used for selecting the panel type when creating a new panel in the dashboard.
* It uses Zustand for state management to keep track of whether the panel type selection modal is open or closed.
*/
export const usePanelTypeSelectionModalStore = create<IPanelTypeSelectionModalState>(
(set) => ({
isPanelTypeSelectionModalOpen: false,
setIsPanelTypeSelectionModalOpen: (isOpen): void =>
set({ isPanelTypeSelectionModalOpen: isOpen }),
}),
);
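
As a usage illustration (not part of the diff): since this is a plain Zustand store, it can be read from React via the hook with a selector, or imperatively through getState(), for example when opening the modal from a toolbar action. The sketch below is hypothetical and assumes a module path the diff does not show.

// Hypothetical usage sketch; the module path is an assumption.
import { usePanelTypeSelectionModalStore } from 'providers/Dashboard/helpers/panelTypeSelectionModalHelper';

// Imperative access outside React (event handlers, tests):
usePanelTypeSelectionModalStore.getState().setIsPanelTypeSelectionModalOpen(true);

// Inside a component, subscribe with a selector so the component only
// re-renders when the selected slice changes:
// const isOpen = usePanelTypeSelectionModalStore((s) => s.isPanelTypeSelectionModalOpen);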


@@ -9,9 +9,7 @@ export type WidgetColumnWidths = {
}; };
export interface IDashboardContext { export interface IDashboardContext {
isDashboardSliderOpen: boolean;
isDashboardLocked: boolean; isDashboardLocked: boolean;
handleToggleDashboardSlider: (value: boolean) => void;
handleDashboardLockToggle: (value: boolean) => void; handleDashboardLockToggle: (value: boolean) => void;
dashboardResponse: UseQueryResult<SuccessResponseV2<Dashboard>, unknown>; dashboardResponse: UseQueryResult<SuccessResponseV2<Dashboard>, unknown>;
selectedDashboard: Dashboard | undefined; selectedDashboard: Dashboard | undefined;


@@ -5,6 +5,11 @@ import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types'; import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems'; import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems';
import { QueryTableProps } from 'container/QueryTable/QueryTable.intefaces'; import { QueryTableProps } from 'container/QueryTable/QueryTable.intefaces';
import {
FillMode,
LineInterpolation,
LineStyle,
} from 'lib/uPlotV2/config/types';
import { Query } from 'types/api/queryBuilder/queryBuilderData'; import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { IField } from '../logs/fields'; import { IField } from '../logs/fields';
@@ -132,6 +137,10 @@ export interface IBaseWidget {
legendPosition?: LegendPosition; legendPosition?: LegendPosition;
customLegendColors?: Record<string, string>; customLegendColors?: Record<string, string>;
contextLinks?: ContextLinksData; contextLinks?: ContextLinksData;
lineInterpolation?: LineInterpolation;
showPoints?: boolean;
lineStyle?: LineStyle;
fillMode?: FillMode;
} }
export interface Widgets extends IBaseWidget { export interface Widgets extends IBaseWidget {
query: Query; query: Query;


@@ -5646,7 +5646,7 @@
tailwind-merge "^2.5.2" tailwind-merge "^2.5.2"
tailwindcss-animate "^1.0.7" tailwindcss-animate "^1.0.7"
"@signozhq/toggle-group@^0.0.1": "@signozhq/toggle-group@0.0.1":
version "0.0.1" version "0.0.1"
resolved "https://registry.yarnpkg.com/@signozhq/toggle-group/-/toggle-group-0.0.1.tgz#c82ff1da34e77b24da53c2d595ad6b4a0d1b1de4" resolved "https://registry.yarnpkg.com/@signozhq/toggle-group/-/toggle-group-0.0.1.tgz#c82ff1da34e77b24da53c2d595ad6b4a0d1b1de4"
integrity sha512-871bQayL5MaqsuNOFHKexidu9W2Hlg1y4xmH8C5mGmlfZ4bd0ovJ9OweQrM6Puys3jeMwi69xmJuesYCfKQc1g== integrity sha512-871bQayL5MaqsuNOFHKexidu9W2Hlg1y4xmH8C5mGmlfZ4bd0ovJ9OweQrM6Puys3jeMwi69xmJuesYCfKQc1g==


@@ -0,0 +1,41 @@
package loghandler
import (
"context"
"log/slog"
"github.com/SigNoz/signoz/pkg/errors"
)
type filtering struct{}
func NewFiltering() *filtering {
return &filtering{}
}
func (h *filtering) Wrap(next LogHandler) LogHandler {
return LogHandlerFunc(func(ctx context.Context, record slog.Record) error {
if !filterRecord(record) {
return nil
}
return next.Handle(ctx, record)
})
}
func filterRecord(record slog.Record) bool {
suppress := false
record.Attrs(func(a slog.Attr) bool {
if a.Value.Kind() == slog.KindAny {
if err, ok := a.Value.Any().(error); ok {
if errors.Is(err, context.Canceled) {
suppress = true
return false
}
}
}
return true
})
return !suppress
}


@@ -0,0 +1,52 @@
package loghandler
import (
"bytes"
"context"
"encoding/json"
"log/slog"
"testing"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestFiltering_SuppressesContextCanceled(t *testing.T) {
filtering := NewFiltering()
buf := bytes.NewBuffer(nil)
logger := slog.New(&handler{base: slog.NewJSONHandler(buf, &slog.HandlerOptions{Level: slog.LevelDebug}), wrappers: []Wrapper{filtering}})
logger.ErrorContext(context.Background(), "operation failed", "error", context.Canceled)
assert.Empty(t, buf.String(), "log with context.Canceled should be suppressed")
}
func TestFiltering_AllowsOtherErrors(t *testing.T) {
filtering := NewFiltering()
buf := bytes.NewBuffer(nil)
logger := slog.New(&handler{base: slog.NewJSONHandler(buf, &slog.HandlerOptions{Level: slog.LevelDebug}), wrappers: []Wrapper{filtering}})
logger.ErrorContext(context.Background(), "operation failed", "error", errors.New(errors.TypeInternal, errors.CodeInternal, "some other error"))
m := make(map[string]any)
err := json.Unmarshal(buf.Bytes(), &m)
require.NoError(t, err)
assert.Equal(t, "operation failed", m["msg"])
}
func TestFiltering_AllowsLogsWithoutErrors(t *testing.T) {
filtering := NewFiltering()
buf := bytes.NewBuffer(nil)
logger := slog.New(&handler{base: slog.NewJSONHandler(buf, &slog.HandlerOptions{Level: slog.LevelDebug}), wrappers: []Wrapper{filtering}})
logger.InfoContext(context.Background(), "normal log", "key", "value")
m := make(map[string]any)
err := json.Unmarshal(buf.Bytes(), &m)
require.NoError(t, err)
assert.Equal(t, "normal log", m["msg"])
}


@@ -116,7 +116,7 @@ func New(ctx context.Context, cfg Config, build version.Build, serviceName strin
meterProvider: meterProvider, meterProvider: meterProvider,
meterProviderShutdownFunc: meterProviderShutdownFunc, meterProviderShutdownFunc: meterProviderShutdownFunc,
prometheusRegistry: prometheusRegistry, prometheusRegistry: prometheusRegistry,
logger: NewLogger(cfg, loghandler.NewCorrelation()), logger: NewLogger(cfg, loghandler.NewCorrelation(), loghandler.NewFiltering()),
startCh: make(chan struct{}), startCh: make(chan struct{}),
}, nil }, nil
} }


@@ -4,60 +4,43 @@ import (
"context" "context"
"net/http" "net/http"
"github.com/SigNoz/signoz/pkg/types/cloudintegrationtypes" citypes "github.com/SigNoz/signoz/pkg/types/cloudintegrationtypes"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes" "github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/valuer" "github.com/SigNoz/signoz/pkg/valuer"
) )
type Module interface { type Module interface {
// CreateConnectionArtifact generates cloud provider specific connection information, CreateAccount(ctx context.Context, account *citypes.Account) error
// client side handles how this information is shown
CreateConnectionArtifact(
ctx context.Context,
orgID valuer.UUID,
provider cloudintegrationtypes.CloudProviderType,
request *cloudintegrationtypes.ConnectionArtifactRequest,
) (*cloudintegrationtypes.ConnectionArtifact, error)
// GetAccountStatus returns agent connection status for a cloud integration account // GetAccount returns cloud integration account
GetAccountStatus(ctx context.Context, orgID, accountID valuer.UUID) (*cloudintegrationtypes.AccountStatus, error) GetAccount(ctx context.Context, orgID, accountID valuer.UUID) (*citypes.Account, error)
// ListConnectedAccounts lists accounts where agent is connected // GetAccounts lists accounts where agent is connected
ListConnectedAccounts(ctx context.Context, orgID valuer.UUID) (*cloudintegrationtypes.ConnectedAccounts, error) GetAccounts(ctx context.Context, orgID valuer.UUID) ([]*citypes.Account, error)
// UpdateAccount updates the cloud integration account for a specific organization.
UpdateAccount(ctx context.Context, account *citypes.Account) error
// DisconnectAccount soft deletes/removes a cloud integration account. // DisconnectAccount soft deletes/removes a cloud integration account.
DisconnectAccount(ctx context.Context, orgID, accountID valuer.UUID) error DisconnectAccount(ctx context.Context, orgID, accountID valuer.UUID) error
// UpdateAccountConfig updates the configuration of an existing cloud account for a specific organization. // GetConnectionArtifact returns cloud provider specific connection information,
UpdateAccountConfig( // client side handles how this information is shown
ctx context.Context, GetConnectionArtifact(ctx context.Context, account *citypes.Account, req *citypes.ConnectionArtifactRequest) (*citypes.ConnectionArtifact, error)
orgID,
accountID valuer.UUID,
config *cloudintegrationtypes.UpdateAccountConfigRequest,
) (*cloudintegrationtypes.Account, error)
// ListServicesMetadata returns list of services metadata for a cloud provider attached with the integrationID. // GetServicesMetadata returns list of services metadata for a cloud provider attached with the integrationID.
// This just returns a summary of the service and not the whole service definition // This just returns a summary of the service and not the whole service definition
ListServicesMetadata(ctx context.Context, orgID valuer.UUID, integrationID *valuer.UUID) (*cloudintegrationtypes.ServicesMetadata, error) GetServicesMetadata(ctx context.Context, orgID valuer.UUID, integrationID *valuer.UUID) ([]*citypes.ServiceMetadata, error)
// GetService returns service definition details for a serviceID. This returns config and // GetService returns service definition details for a serviceID. This returns config and
// other details required to show in service details page on web client. // other details required to show in service details page on web client.
GetService(ctx context.Context, orgID valuer.UUID, integrationID *valuer.UUID, serviceID string) (*cloudintegrationtypes.Service, error) GetService(ctx context.Context, orgID valuer.UUID, integrationID *valuer.UUID, serviceID string) (*citypes.Service, error)
// UpdateServiceConfig updates cloud integration service config // UpdateService updates cloud integration service
UpdateServiceConfig( UpdateService(ctx context.Context, orgID valuer.UUID, service *citypes.CloudIntegrationService) error
ctx context.Context,
orgID valuer.UUID,
serviceID string,
config *cloudintegrationtypes.UpdateServiceConfigRequest,
) (*cloudintegrationtypes.UpdateServiceConfigResponse, error)
// AgentCheckIn is called by agent to heartbeat and get latest config in response. // AgentCheckIn is called by agent to heartbeat and get latest config in response.
AgentCheckIn( AgentCheckIn(ctx context.Context, orgID valuer.UUID, req *citypes.AgentCheckInRequest) (*citypes.AgentCheckInResponse, error)
ctx context.Context,
orgID valuer.UUID,
req *cloudintegrationtypes.AgentCheckInRequest,
) (*cloudintegrationtypes.AgentCheckInResponse, error)
// GetDashboardByID returns dashboard JSON for a given dashboard id. // GetDashboardByID returns dashboard JSON for a given dashboard id.
// this only returns the dashboard when the service (embedded in dashboard id) is enabled // this only returns the dashboard when the service (embedded in dashboard id) is enabled
@@ -70,13 +53,13 @@ type Module interface {
} }
type Handler interface { type Handler interface {
AgentCheckIn(http.ResponseWriter, *http.Request) GetConnectionArtifact(http.ResponseWriter, *http.Request)
GenerateConnectionArtifact(http.ResponseWriter, *http.Request) GetAccounts(http.ResponseWriter, *http.Request)
ListConnectedAccounts(http.ResponseWriter, *http.Request) GetAccount(http.ResponseWriter, *http.Request)
GetAccountStatus(http.ResponseWriter, *http.Request) UpdateAccount(http.ResponseWriter, *http.Request)
ListServices(http.ResponseWriter, *http.Request)
GetServiceDetails(http.ResponseWriter, *http.Request)
UpdateAccountConfig(http.ResponseWriter, *http.Request)
UpdateServiceConfig(http.ResponseWriter, *http.Request)
DisconnectAccount(http.ResponseWriter, *http.Request) DisconnectAccount(http.ResponseWriter, *http.Request)
GetServicesMetadata(http.ResponseWriter, *http.Request)
GetService(http.ResponseWriter, *http.Request)
UpdateService(http.ResponseWriter, *http.Request)
AgentCheckIn(http.ResponseWriter, *http.Request)
} }


@@ -0,0 +1,134 @@
package implcloudintegration
import (
"context"
"time"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/cloudintegrationtypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type store struct {
store sqlstore.SQLStore
}
func NewStore(sqlStore sqlstore.SQLStore) cloudintegrationtypes.Store {
return &store{store: sqlStore}
}
func (s *store) GetAccountByID(ctx context.Context, orgID, id valuer.UUID, provider cloudintegrationtypes.CloudProviderType) (*cloudintegrationtypes.StorableCloudIntegration, error) {
account := new(cloudintegrationtypes.StorableCloudIntegration)
err := s.store.BunDBCtx(ctx).NewSelect().Model(account).
Where("id = ?", id).
Where("org_id = ?", orgID).
Where("provider = ?", provider).
Scan(ctx)
if err != nil {
return nil, s.store.WrapNotFoundErrf(err, cloudintegrationtypes.ErrCodeCloudIntegrationNotFound, "cloud integration account with id %s not found", id)
}
return account, nil
}
func (s *store) CreateAccount(ctx context.Context, account *cloudintegrationtypes.StorableCloudIntegration) (*cloudintegrationtypes.StorableCloudIntegration, error) {
_, err := s.store.BunDBCtx(ctx).NewInsert().Model(account).Exec(ctx)
if err != nil {
return nil, s.store.WrapAlreadyExistsErrf(err, cloudintegrationtypes.ErrCodeCloudIntegrationAlreadyExists, "cloud integration account with id %s already exists", account.ID)
}
return account, nil
}
func (s *store) UpdateAccount(ctx context.Context, account *cloudintegrationtypes.StorableCloudIntegration) error {
_, err := s.store.BunDBCtx(ctx).
NewUpdate().
Model(account).
WherePK().
Where("org_id = ?", account.OrgID).
Where("provider = ?", account.Provider).
Exec(ctx)
return err
}
func (s *store) RemoveAccount(ctx context.Context, orgID, id valuer.UUID, provider cloudintegrationtypes.CloudProviderType) error {
_, err := s.store.BunDBCtx(ctx).NewUpdate().Model(new(cloudintegrationtypes.StorableCloudIntegration)).
Set("removed_at = ?", time.Now()).
Where("id = ?", id).
Where("org_id = ?", orgID).
Where("provider = ?", provider).
Exec(ctx)
return err
}
func (s *store) GetConnectedAccounts(ctx context.Context, orgID valuer.UUID, provider cloudintegrationtypes.CloudProviderType) ([]*cloudintegrationtypes.StorableCloudIntegration, error) {
var accounts []*cloudintegrationtypes.StorableCloudIntegration
err := s.store.BunDBCtx(ctx).NewSelect().Model(&accounts).
Where("org_id = ?", orgID).
Where("provider = ?", provider).
Where("removed_at IS NULL").
Where("account_id IS NOT NULL").
Where("last_agent_report IS NOT NULL").
Order("created_at ASC").
Scan(ctx)
if err != nil {
return nil, err
}
return accounts, nil
}
func (s *store) GetConnectedAccount(ctx context.Context, orgID valuer.UUID, provider cloudintegrationtypes.CloudProviderType, providerAccountID string) (*cloudintegrationtypes.StorableCloudIntegration, error) {
account := new(cloudintegrationtypes.StorableCloudIntegration)
err := s.store.BunDBCtx(ctx).NewSelect().Model(account).
Where("org_id = ?", orgID).
Where("provider = ?", provider).
Where("account_id = ?", providerAccountID).
Where("last_agent_report IS NOT NULL").
Where("removed_at IS NULL").
Scan(ctx)
if err != nil {
return nil, s.store.WrapNotFoundErrf(err, cloudintegrationtypes.ErrCodeCloudIntegrationNotFound, "connected account with provider account id %s not found", providerAccountID)
}
return account, nil
}
func (s *store) GetServiceByServiceID(ctx context.Context, cloudIntegrationID valuer.UUID, serviceID cloudintegrationtypes.ServiceID) (*cloudintegrationtypes.StorableCloudIntegrationService, error) {
service := new(cloudintegrationtypes.StorableCloudIntegrationService)
err := s.store.BunDBCtx(ctx).NewSelect().Model(service).
Where("cloud_integration_id = ?", cloudIntegrationID).
Where("type = ?", serviceID).
Scan(ctx)
if err != nil {
return nil, s.store.WrapNotFoundErrf(err, cloudintegrationtypes.ErrCodeCloudIntegrationNotFound, "cloud integration service with id %s not found", serviceID)
}
return service, nil
}
func (s *store) CreateService(ctx context.Context, service *cloudintegrationtypes.StorableCloudIntegrationService) (*cloudintegrationtypes.StorableCloudIntegrationService, error) {
_, err := s.store.BunDBCtx(ctx).NewInsert().Model(service).Exec(ctx)
if err != nil {
return nil, s.store.WrapAlreadyExistsErrf(err, cloudintegrationtypes.ErrCodeCloudIntegrationServiceAlreadyExists, "cloud integration service with id %s already exists for integration account", service.Type)
}
return service, nil
}
func (s *store) UpdateService(ctx context.Context, service *cloudintegrationtypes.StorableCloudIntegrationService) error {
_, err := s.store.BunDBCtx(ctx).NewUpdate().Model(service).
WherePK().
Where("cloud_integration_id = ?", service.CloudIntegrationID).
Where("type = ?", service.Type).
Exec(ctx)
return err
}
func (s *store) GetServices(ctx context.Context, cloudIntegrationID valuer.UUID) ([]*cloudintegrationtypes.StorableCloudIntegrationService, error) {
var services []*cloudintegrationtypes.StorableCloudIntegrationService
err := s.store.BunDBCtx(ctx).NewSelect().Model(&services).
Where("cloud_integration_id = ?", cloudIntegrationID).
Scan(ctx)
if err != nil {
return nil, err
}
return services, nil
}


@@ -7,13 +7,14 @@ import (
"strings" "strings"
"time" "time"
"log/slog"
"github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/model" "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/opamptypes" "github.com/SigNoz/signoz/pkg/types/opamptypes"
"github.com/SigNoz/signoz/pkg/valuer" "github.com/SigNoz/signoz/pkg/valuer"
"go.uber.org/zap"
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
) )
@@ -121,7 +122,7 @@ func (r *Repo) insertConfig(
// allowing empty elements for logs - use case is deleting all pipelines // allowing empty elements for logs - use case is deleting all pipelines
if len(elements) == 0 && c.ElementType != opamptypes.ElementTypeLogPipelines { if len(elements) == 0 && c.ElementType != opamptypes.ElementTypeLogPipelines {
zap.L().Error("insert config called with no elements ", zap.String("ElementType", c.ElementType.StringValue())) slog.ErrorContext(ctx, "insert config called with no elements", "element_type", c.ElementType.StringValue())
return errors.NewInvalidInputf(CodeConfigElementsRequired, "config must have atleast one element") return errors.NewInvalidInputf(CodeConfigElementsRequired, "config must have atleast one element")
} }
@@ -129,13 +130,13 @@ func (r *Repo) insertConfig(
// the version can not be set by the user, we want to auto-assign the versions // the version can not be set by the user, we want to auto-assign the versions
// in a monotonically increasing order starting with 1. hence, we reject insert // in a monotonically increasing order starting with 1. hence, we reject insert
// requests with version anything other than 0. here, 0 indicates un-assigned // requests with version anything other than 0. here, 0 indicates un-assigned
zap.L().Error("invalid version assignment while inserting agent config", zap.Int("version", c.Version), zap.String("ElementType", c.ElementType.StringValue())) slog.ErrorContext(ctx, "invalid version assignment while inserting agent config", "version", c.Version, "element_type", c.ElementType.StringValue())
return errors.NewInvalidInputf(errors.CodeInvalidInput, "user defined versions are not supported in the agent config") return errors.NewInvalidInputf(errors.CodeInvalidInput, "user defined versions are not supported in the agent config")
} }
configVersion, err := r.GetLatestVersion(ctx, orgId, c.ElementType) configVersion, err := r.GetLatestVersion(ctx, orgId, c.ElementType)
if err != nil && !errors.Ast(err, errors.TypeNotFound) { if err != nil && !errors.Ast(err, errors.TypeNotFound) {
zap.L().Error("failed to fetch latest config version", zap.Error(err)) slog.ErrorContext(ctx, "failed to fetch latest config version", "error", err)
return err return err
} }
@@ -155,11 +156,11 @@ func (r *Repo) insertConfig(
// Delete elements first, then version (to respect potential foreign key constraints) // Delete elements first, then version (to respect potential foreign key constraints)
_, delErr := r.store.BunDB().NewDelete().Model(new(opamptypes.AgentConfigElement)).Where("version_id = ?", c.ID).Exec(ctx) _, delErr := r.store.BunDB().NewDelete().Model(new(opamptypes.AgentConfigElement)).Where("version_id = ?", c.ID).Exec(ctx)
if delErr != nil { if delErr != nil {
zap.L().Error("failed to delete config elements during cleanup", zap.Error(delErr), zap.String("version_id", c.ID.String())) slog.ErrorContext(ctx, "failed to delete config elements during cleanup", "error", delErr, "version_id", c.ID.String())
} }
_, delErr = r.store.BunDB().NewDelete().Model(new(opamptypes.AgentConfigVersion)).Where("id = ?", c.ID).Where("org_id = ?", orgId).Exec(ctx) _, delErr = r.store.BunDB().NewDelete().Model(new(opamptypes.AgentConfigVersion)).Where("id = ?", c.ID).Where("org_id = ?", orgId).Exec(ctx)
if delErr != nil { if delErr != nil {
zap.L().Error("failed to delete config version during cleanup", zap.Error(delErr), zap.String("version_id", c.ID.String())) slog.ErrorContext(ctx, "failed to delete config version during cleanup", "error", delErr, "version_id", c.ID.String())
} }
} }
}() }()
@@ -170,7 +171,7 @@ func (r *Repo) insertConfig(
Model(c). Model(c).
Exec(ctx) Exec(ctx)
if dbErr != nil { if dbErr != nil {
zap.L().Error("error in inserting config version: ", zap.Error(dbErr)) slog.ErrorContext(ctx, "error in inserting config version", "error", dbErr)
return errors.WrapInternalf(dbErr, CodeConfigVersionInsertFailed, "failed to insert config version") return errors.WrapInternalf(dbErr, CodeConfigVersionInsertFailed, "failed to insert config version")
} }
@@ -221,7 +222,7 @@ func (r *Repo) updateDeployStatus(ctx context.Context,
Where("org_id = ?", orgId). Where("org_id = ?", orgId).
Exec(ctx) Exec(ctx)
if err != nil { if err != nil {
zap.L().Error("failed to update deploy status", zap.Error(err)) slog.ErrorContext(ctx, "failed to update deploy status", "error", err)
return model.BadRequest(fmt.Errorf("failed to update deploy status")) return model.BadRequest(fmt.Errorf("failed to update deploy status"))
} }
@@ -239,7 +240,7 @@ func (r *Repo) updateDeployStatusByHash(
Where("org_id = ?", orgId). Where("org_id = ?", orgId).
Exec(ctx) Exec(ctx)
if err != nil { if err != nil {
zap.L().Error("failed to update deploy status", zap.Error(err)) slog.ErrorContext(ctx, "failed to update deploy status", "error", err)
return errors.WrapInternalf(err, CodeConfigDeployStatusUpdateFailed, "failed to update deploy status") return errors.WrapInternalf(err, CodeConfigDeployStatusUpdateFailed, "failed to update deploy status")
} }


@@ -4,6 +4,7 @@ import (
"context" "context"
"crypto/sha256" "crypto/sha256"
"fmt" "fmt"
"log/slog"
"strings" "strings"
"sync" "sync"
"sync/atomic" "sync/atomic"
@@ -17,7 +18,6 @@ import (
"github.com/SigNoz/signoz/pkg/types/opamptypes" "github.com/SigNoz/signoz/pkg/types/opamptypes"
"github.com/SigNoz/signoz/pkg/valuer" "github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid" "github.com/google/uuid"
"go.uber.org/zap"
yaml "gopkg.in/yaml.v3" yaml "gopkg.in/yaml.v3"
) )
@@ -36,7 +36,8 @@ type AgentFeatureType string
type Manager struct { type Manager struct {
Repo Repo
// lock to make sure only one update is sent to remote agents at a time // lock to make sure only one update is sent to remote agents at a time
lock uint32 lock uint32
logger *slog.Logger
// For AgentConfigProvider implementation // For AgentConfigProvider implementation
agentFeatures []AgentFeature agentFeatures []AgentFeature
@@ -67,6 +68,7 @@ func Initiate(options *ManagerOptions) (*Manager, error) {
m = &Manager{ m = &Manager{
Repo: Repo{options.Store}, Repo: Repo{options.Store},
logger: slog.Default(),
agentFeatures: options.AgentFeatures, agentFeatures: options.AgentFeatures,
configSubscribers: map[string]func(){}, configSubscribers: map[string]func(){},
} }
@@ -222,19 +224,19 @@ func NotifyConfigUpdate(ctx context.Context) {
func Redeploy(ctx context.Context, orgId valuer.UUID, typ opamptypes.ElementType, version int) error { func Redeploy(ctx context.Context, orgId valuer.UUID, typ opamptypes.ElementType, version int) error {
configVersion, err := GetConfigVersion(ctx, orgId, typ, version) configVersion, err := GetConfigVersion(ctx, orgId, typ, version)
if err != nil { if err != nil {
zap.L().Error("failed to fetch config version during redeploy", zap.Error(err)) slog.ErrorContext(ctx, "failed to fetch config version during redeploy", "error", err)
return err return err
} }
if configVersion == nil || (configVersion != nil && configVersion.Config == "") { if configVersion == nil || (configVersion != nil && configVersion.Config == "") {
zap.L().Debug("config version has no conf yaml", zap.Any("configVersion", configVersion)) slog.DebugContext(ctx, "config version has no conf yaml", "config_version", configVersion)
return errors.NewInvalidInputf(CodeConfigVersionNoConfig, "the config version can not be redeployed") return errors.NewInvalidInputf(CodeConfigVersionNoConfig, "the config version can not be redeployed")
} }
switch typ { switch typ {
case opamptypes.ElementTypeSamplingRules: case opamptypes.ElementTypeSamplingRules:
var config *tsp.Config var config *tsp.Config
if err := yaml.Unmarshal([]byte(configVersion.Config), &config); err != nil { if err := yaml.Unmarshal([]byte(configVersion.Config), &config); err != nil {
zap.L().Debug("failed to read last conf correctly", zap.Error(err)) slog.DebugContext(ctx, "failed to read last conf correctly", "error", err)
return model.BadRequest(fmt.Errorf("failed to read the stored config correctly")) return model.BadRequest(fmt.Errorf("failed to read the stored config correctly"))
} }
@@ -246,7 +248,7 @@ func Redeploy(ctx context.Context, orgId valuer.UUID, typ opamptypes.ElementType
opamp.AddToTracePipelineSpec("signoz_tail_sampling") opamp.AddToTracePipelineSpec("signoz_tail_sampling")
configHash, err := opamp.UpsertControlProcessors(ctx, "traces", processorConf, m.OnConfigUpdate) configHash, err := opamp.UpsertControlProcessors(ctx, "traces", processorConf, m.OnConfigUpdate)
if err != nil { if err != nil {
zap.L().Error("failed to call agent config update for trace processor", zap.Error(err)) slog.ErrorContext(ctx, "failed to call agent config update for trace processor", "error", err)
return errors.WithAdditionalf(err, "failed to deploy the config") return errors.WithAdditionalf(err, "failed to deploy the config")
} }
@@ -254,7 +256,7 @@ func Redeploy(ctx context.Context, orgId valuer.UUID, typ opamptypes.ElementType
case opamptypes.ElementTypeDropRules: case opamptypes.ElementTypeDropRules:
var filterConfig *filterprocessor.Config var filterConfig *filterprocessor.Config
if err := yaml.Unmarshal([]byte(configVersion.Config), &filterConfig); err != nil { if err := yaml.Unmarshal([]byte(configVersion.Config), &filterConfig); err != nil {
zap.L().Error("failed to read last conf correctly", zap.Error(err)) slog.ErrorContext(ctx, "failed to read last conf correctly", "error", err)
return model.InternalError(fmt.Errorf("failed to read the stored config correctly")) return model.InternalError(fmt.Errorf("failed to read the stored config correctly"))
} }
processorConf := map[string]interface{}{ processorConf := map[string]interface{}{
@@ -264,7 +266,7 @@ func Redeploy(ctx context.Context, orgId valuer.UUID, typ opamptypes.ElementType
opamp.AddToMetricsPipelineSpec("filter") opamp.AddToMetricsPipelineSpec("filter")
configHash, err := opamp.UpsertControlProcessors(ctx, "metrics", processorConf, m.OnConfigUpdate) configHash, err := opamp.UpsertControlProcessors(ctx, "metrics", processorConf, m.OnConfigUpdate)
if err != nil { if err != nil {
zap.L().Error("failed to call agent config update for trace processor", zap.Error(err)) slog.ErrorContext(ctx, "failed to call agent config update for trace processor", "error", err)
return err return err
} }
@@ -290,13 +292,13 @@ func UpsertFilterProcessor(ctx context.Context, orgId valuer.UUID, version int,
opamp.AddToMetricsPipelineSpec("filter") opamp.AddToMetricsPipelineSpec("filter")
configHash, err := opamp.UpsertControlProcessors(ctx, "metrics", processorConf, m.OnConfigUpdate) configHash, err := opamp.UpsertControlProcessors(ctx, "metrics", processorConf, m.OnConfigUpdate)
if err != nil { if err != nil {
zap.L().Error("failed to call agent config update for trace processor", zap.Error(err)) slog.ErrorContext(ctx, "failed to call agent config update for trace processor", "error", err)
return err return err
} }
processorConfYaml, yamlErr := yaml.Marshal(config) processorConfYaml, yamlErr := yaml.Marshal(config)
if yamlErr != nil { if yamlErr != nil {
zap.L().Warn("unexpected error while transforming processor config to yaml", zap.Error(yamlErr)) slog.WarnContext(ctx, "unexpected error while transforming processor config to yaml", "error", yamlErr)
} }
m.updateDeployStatus(ctx, orgId, opamptypes.ElementTypeDropRules, version, opamptypes.DeployInitiated.StringValue(), "Deployment started", configHash, string(processorConfYaml)) m.updateDeployStatus(ctx, orgId, opamptypes.ElementTypeDropRules, version, opamptypes.DeployInitiated.StringValue(), "Deployment started", configHash, string(processorConfYaml))
@@ -315,7 +317,7 @@ func (m *Manager) OnConfigUpdate(orgId valuer.UUID, agentId string, hash string,
message := "Deployment was successful" message := "Deployment was successful"
defer func() { defer func() {
zap.L().Info(status, zap.String("agentId", agentId), zap.String("agentResponse", message)) m.logger.Info(status, "agent_id", agentId, "agent_response", message)
}() }()
if err != nil { if err != nil {
@@ -341,13 +343,13 @@ func UpsertSamplingProcessor(ctx context.Context, orgId valuer.UUID, version int
opamp.AddToTracePipelineSpec("signoz_tail_sampling") opamp.AddToTracePipelineSpec("signoz_tail_sampling")
configHash, err := opamp.UpsertControlProcessors(ctx, "traces", processorConf, m.OnConfigUpdate) configHash, err := opamp.UpsertControlProcessors(ctx, "traces", processorConf, m.OnConfigUpdate)
if err != nil { if err != nil {
zap.L().Error("failed to call agent config update for trace processor", zap.Error(err)) slog.ErrorContext(ctx, "failed to call agent config update for trace processor", "error", err)
return err return err
} }
processorConfYaml, yamlErr := yaml.Marshal(config) processorConfYaml, yamlErr := yaml.Marshal(config)
if yamlErr != nil { if yamlErr != nil {
zap.L().Warn("unexpected error while transforming processor config to yaml", zap.Error(yamlErr)) slog.WarnContext(ctx, "unexpected error while transforming processor config to yaml", "error", yamlErr)
} }
m.updateDeployStatus(ctx, orgId, opamptypes.ElementTypeSamplingRules, version, opamptypes.DeployInitiated.StringValue(), "Deployment started", configHash, string(processorConfYaml)) m.updateDeployStatus(ctx, orgId, opamptypes.ElementTypeSamplingRules, version, opamptypes.DeployInitiated.StringValue(), "Deployment started", configHash, string(processorConfYaml))


@@ -11,7 +11,6 @@ import (
"github.com/SigNoz/signoz-otel-collector/utils/fingerprint" "github.com/SigNoz/signoz-otel-collector/utils/fingerprint"
"github.com/SigNoz/signoz/pkg/query-service/model" "github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"go.uber.org/zap"
) )
func (r *ClickHouseReader) GetQBFilterSuggestionsForLogs( func (r *ClickHouseReader) GetQBFilterSuggestionsForLogs(
@@ -79,7 +78,7 @@ func (r *ClickHouseReader) GetQBFilterSuggestionsForLogs(
) )
if err != nil { if err != nil {
// Do not fail the entire request if only example query generation fails // Do not fail the entire request if only example query generation fails
zap.L().Error("could not find attribute values for creating example query", zap.Error(err)) r.logger.ErrorContext(ctx, "could not find attribute values for creating example query", "error", err)
} else { } else {
// add example queries for as many attributes as possible. // add example queries for as many attributes as possible.
@@ -159,10 +158,7 @@ func (r *ClickHouseReader) getValuesForLogAttributes(
*/ */
if len(attributes) > 10 { if len(attributes) > 10 {
zap.L().Error( r.logger.ErrorContext(ctx, "log attribute values requested for too many attributes. This can lead to slow and costly queries", "count", len(attributes))
"log attribute values requested for too many attributes. This can lead to slow and costly queries",
zap.Int("count", len(attributes)),
)
attributes = attributes[:10] attributes = attributes[:10]
} }
@@ -187,7 +183,7 @@ func (r *ClickHouseReader) getValuesForLogAttributes(
rows, err := r.db.Query(ctx, query, tagKeyQueryArgs...) rows, err := r.db.Query(ctx, query, tagKeyQueryArgs...)
if err != nil { if err != nil {
zap.L().Error("couldn't query attrib values for suggestions", zap.Error(err)) r.logger.ErrorContext(ctx, "couldn't query attrib values for suggestions", "error", err)
return nil, model.InternalError(fmt.Errorf( return nil, model.InternalError(fmt.Errorf(
"couldn't query attrib values for suggestions: %w", err, "couldn't query attrib values for suggestions: %w", err,
)) ))


@@ -2,17 +2,18 @@ package queryprogress
import ( import (
"fmt" "fmt"
"log/slog"
"sync" "sync"
"github.com/ClickHouse/clickhouse-go/v2" "github.com/ClickHouse/clickhouse-go/v2"
"github.com/SigNoz/signoz/pkg/query-service/model" "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/google/uuid" "github.com/google/uuid"
"go.uber.org/zap"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
) )
// tracks progress and manages subscriptions for all queries // tracks progress and manages subscriptions for all queries
type inMemoryQueryProgressTracker struct { type inMemoryQueryProgressTracker struct {
logger *slog.Logger
queries map[string]*queryTracker queries map[string]*queryTracker
lock sync.RWMutex lock sync.RWMutex
} }
@@ -30,7 +31,7 @@ func (tracker *inMemoryQueryProgressTracker) ReportQueryStarted(
)) ))
} }
tracker.queries[queryId] = newQueryTracker(queryId) tracker.queries[queryId] = newQueryTracker(tracker.logger, queryId)
return func() { return func() {
tracker.onQueryFinished(queryId) tracker.onQueryFinished(queryId)
@@ -93,6 +94,7 @@ func (tracker *inMemoryQueryProgressTracker) getQueryTracker(
// Tracks progress and manages subscriptions for a single query // Tracks progress and manages subscriptions for a single query
type queryTracker struct { type queryTracker struct {
logger *slog.Logger
queryId string queryId string
isFinished bool isFinished bool
@@ -102,8 +104,9 @@ type queryTracker struct {
lock sync.Mutex lock sync.Mutex
} }
func newQueryTracker(queryId string) *queryTracker { func newQueryTracker(logger *slog.Logger, queryId string) *queryTracker {
return &queryTracker{ return &queryTracker{
logger: logger,
queryId: queryId, queryId: queryId,
subscriptions: map[string]*queryProgressSubscription{}, subscriptions: map[string]*queryProgressSubscription{},
} }
@@ -114,10 +117,7 @@ func (qt *queryTracker) handleProgressUpdate(p *clickhouse.Progress) {
defer qt.lock.Unlock() defer qt.lock.Unlock()
if qt.isFinished { if qt.isFinished {
zap.L().Warn( qt.logger.Warn("received clickhouse progress update for finished query", "queryId", qt.queryId, "progress", p)
"received clickhouse progress update for finished query",
zap.String("queryId", qt.queryId), zap.Any("progress", p),
)
return return
} }
@@ -146,7 +146,7 @@ func (qt *queryTracker) subscribe() (
} }
subscriberId := uuid.NewString() subscriberId := uuid.NewString()
subscription := newQueryProgressSubscription() subscription := newQueryProgressSubscription(qt.logger)
qt.subscriptions[subscriberId] = subscription qt.subscriptions[subscriberId] = subscription
if qt.progress != nil { if qt.progress != nil {
@@ -163,11 +163,7 @@ func (qt *queryTracker) unsubscribe(subscriberId string) {
defer qt.lock.Unlock() defer qt.lock.Unlock()
if qt.isFinished { if qt.isFinished {
zap.L().Debug( qt.logger.Debug("received unsubscribe request after query finished", "subscriber", subscriberId, "queryId", qt.queryId)
"received unsubscribe request after query finished",
zap.String("subscriber", subscriberId),
zap.String("queryId", qt.queryId),
)
return return
} }
@@ -183,10 +179,7 @@ func (qt *queryTracker) onFinished() {
defer qt.lock.Unlock() defer qt.lock.Unlock()
if qt.isFinished { if qt.isFinished {
zap.L().Warn( qt.logger.Warn("receiver query finish report after query finished", "queryId", qt.queryId)
"receiver query finish report after query finished",
zap.String("queryId", qt.queryId),
)
return return
} }
@@ -199,15 +192,17 @@ func (qt *queryTracker) onFinished() {
} }
type queryProgressSubscription struct { type queryProgressSubscription struct {
logger *slog.Logger
ch chan model.QueryProgress ch chan model.QueryProgress
isClosed bool isClosed bool
lock sync.Mutex lock sync.Mutex
} }
func newQueryProgressSubscription() *queryProgressSubscription { func newQueryProgressSubscription(logger *slog.Logger) *queryProgressSubscription {
ch := make(chan model.QueryProgress, 1000) ch := make(chan model.QueryProgress, 1000)
return &queryProgressSubscription{ return &queryProgressSubscription{
ch: ch, logger: logger,
ch: ch,
} }
} }
@@ -217,10 +212,7 @@ func (ch *queryProgressSubscription) send(progress model.QueryProgress) {
defer ch.lock.Unlock() defer ch.lock.Unlock()
if ch.isClosed { if ch.isClosed {
zap.L().Error( ch.logger.Error("can't send query progress: channel already closed.", "progress", progress)
"can't send query progress: channel already closed.",
zap.Any("progress", progress),
)
return return
} }
@@ -228,12 +220,9 @@ func (ch *queryProgressSubscription) send(progress model.QueryProgress) {
// blocking while sending doesn't happen in the happy path // blocking while sending doesn't happen in the happy path
select { select {
case ch.ch <- progress: case ch.ch <- progress:
zap.L().Debug("published query progress", zap.Any("progress", progress)) ch.logger.Debug("published query progress", "progress", progress)
default: default:
zap.L().Error( ch.logger.Error("couldn't publish query progress. dropping update.", "progress", progress)
"couldn't publish query progress. dropping update.",
zap.Any("progress", progress),
)
} }
} }
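Note: the send method above relies on a buffered channel plus a select/default so that a slow subscriber drops updates instead of blocking the ClickHouse progress callback. A minimal sketch of that pattern, not part of the diff (the progress type and send function are simplified stand-ins):

package main

import "log/slog"

type progress struct{ ReadRows uint64 }

// send mirrors the non-blocking publish pattern: accept if the buffer has room,
// otherwise log and drop the update rather than blocking the query goroutine.
func send(logger *slog.Logger, ch chan progress, p progress) {
	select {
	case ch <- p:
		logger.Debug("published query progress", "progress", p)
	default:
		logger.Error("couldn't publish query progress. dropping update.", "progress", p)
	}
}

func main() {
	ch := make(chan progress, 2) // the real subscription uses a buffer of 1000
	logger := slog.Default()
	for i := 0; i < 3; i++ {
		send(logger, ch, progress{ReadRows: uint64(i)}) // third call hits the default branch
	}
}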


@@ -1,6 +1,8 @@
 package queryprogress
 import (
+	"log/slog"
 	"github.com/ClickHouse/clickhouse-go/v2"
 	"github.com/SigNoz/signoz/pkg/query-service/model"
 )
@@ -21,10 +23,11 @@ type QueryProgressTracker interface {
 	SubscribeToQueryProgress(queryId string) (ch <-chan model.QueryProgress, unsubscribe func(), apiErr *model.ApiError)
 }
-func NewQueryProgressTracker() QueryProgressTracker {
+func NewQueryProgressTracker(logger *slog.Logger) QueryProgressTracker {
 	// InMemory tracker is useful only for single replica query service setups.
 	// Multi replica setups must use a centralized store for tracking and subscribing to query progress
 	return &inMemoryQueryProgressTracker{
+		logger:  logger,
 		queries: map[string]*queryTracker{},
 	}
 }
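Callers now have to supply a *slog.Logger to the constructor. A sketch (not from the PR) of passing a component-scoped logger instead of slog.Default(), so every tracker log line carries a stable attribute; the "component" key and handler choice are assumptions:

package main

import (
	"log/slog"
	"os"
)

func main() {
	base := slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelDebug}))
	trackerLogger := base.With("component", "queryprogress")

	// The constructor in the hunk above would receive trackerLogger, e.g.:
	//   tracker := queryprogress.NewQueryProgressTracker(trackerLogger)
	trackerLogger.Debug("tracker initialised")
}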


@@ -1,6 +1,7 @@
 package queryprogress
 import (
+	"log/slog"
 	"testing"
 	"time"
@@ -12,7 +13,7 @@ import (
 func TestQueryProgressTracking(t *testing.T) {
 	require := require.New(t)
-	tracker := NewQueryProgressTracker()
+	tracker := NewQueryProgressTracker(slog.Default())
 	testQueryId := "test-query"
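The test passes slog.Default(), which writes to the process logger. A sketch of a quieter alternative, assuming the test only needs a non-nil *slog.Logger (the helper name is hypothetical):

package queryprogress

import (
	"io"
	"log/slog"
	"testing"
)

// newTestLogger returns a logger whose output is discarded, keeping `go test`
// output clean while still exercising the logger-injection path.
func newTestLogger(t *testing.T) *slog.Logger {
	t.Helper()
	return slog.New(slog.NewTextHandler(io.Discard, nil))
}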

File diff suppressed because it is too large


@@ -13,6 +13,7 @@ import (
 	"github.com/SigNoz/signoz/pkg/queryparser"
 	"io"
+	"log/slog"
 	"math"
 	"net/http"
 	"regexp"
@@ -73,8 +74,6 @@ import (
 	"github.com/SigNoz/signoz/pkg/types/ruletypes"
 	traceFunnels "github.com/SigNoz/signoz/pkg/types/tracefunneltypes"
-	"go.uber.org/zap"
 	"github.com/SigNoz/signoz/pkg/query-service/app/integrations/messagingQueues/kafka"
 	"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
 	"github.com/SigNoz/signoz/pkg/query-service/interfaces"
@@ -97,6 +96,7 @@ func NewRouter() *mux.Router {
 // APIHandler implements the query service public API
 type APIHandler struct {
+	logger      *slog.Logger
 	reader      interfaces.Reader
 	ruleManager *rules.Manager
 	querier     interfaces.Querier
@@ -212,6 +212,7 @@ func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, erro
 	//quickFilterModule := quickfilter.NewAPI(opts.QuickFilterModule)
 	aH := &APIHandler{
+		logger:         slog.Default(),
 		reader:         opts.Reader,
 		temporalityMap: make(map[string]map[v3.Temporality]bool),
 		ruleManager:    opts.RuleManager,
@@ -251,13 +252,13 @@ func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, erro
 	// TODO(nitya): remote this in later for multitenancy.
 	orgs, err := opts.Signoz.Modules.OrgGetter.ListByOwnedKeyRange(context.Background())
 	if err != nil {
-		zap.L().Warn("unexpected error while fetching orgs while initializing base api handler", zap.Error(err))
+		aH.logger.Warn("unexpected error while fetching orgs while initializing base api handler", "error", err)
 	}
 	// if the first org with the first user is created then the setup is complete.
 	if len(orgs) == 1 {
 		count, err := opts.Signoz.Modules.UserGetter.CountByOrgID(context.Background(), orgs[0].ID)
 		if err != nil {
-			zap.L().Warn("unexpected error while fetch user count while initializing base api handler", zap.Error(err))
+			aH.logger.Warn("unexpected error while fetching user count while initializing base api handler", "error", err)
 		}
 		if count > 0 {
@@ -312,7 +313,7 @@ func RespondError(w http.ResponseWriter, apiErr model.BaseApiError, data interfa
 		Data: data,
 	})
 	if err != nil {
-		zap.L().Error("error marshalling json response", zap.Error(err))
+		slog.Error("error marshalling json response", "error", err)
 		http.Error(w, err.Error(), http.StatusInternalServerError)
 		return
 	}
@@ -344,7 +345,7 @@ func RespondError(w http.ResponseWriter, apiErr model.BaseApiError, data interfa
 	w.Header().Set("Content-Type", "application/json")
 	w.WriteHeader(code)
 	if n, err := w.Write(b); err != nil {
-		zap.L().Error("error writing response", zap.Int("bytesWritten", n), zap.Error(err))
+		slog.Error("error writing response", "bytes_written", n, "error", err)
 	}
 }
@@ -356,7 +357,7 @@ func writeHttpResponse(w http.ResponseWriter, data interface{}) {
 		Data: data,
 	})
 	if err != nil {
-		zap.L().Error("error marshalling json response", zap.Error(err))
+		slog.Error("error marshalling json response", "error", err)
 		http.Error(w, err.Error(), http.StatusInternalServerError)
 		return
 	}
@@ -364,7 +365,7 @@ func writeHttpResponse(w http.ResponseWriter, data interface{}) {
 	w.Header().Set("Content-Type", "application/json")
 	w.WriteHeader(http.StatusOK)
 	if n, err := w.Write(b); err != nil {
-		zap.L().Error("error writing response", zap.Int("bytesWritten", n), zap.Error(err))
+		slog.Error("error writing response", "bytes_written", n, "error", err)
 	}
 }
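Two styles appear in this file: package-level helpers such as RespondError and writeHttpResponse fall back to the package-level slog functions, while APIHandler methods log through the injected aH.logger and propagate the request context. A minimal sketch of the distinction, with simplified, assumed signatures:

package main

import (
	"context"
	"errors"
	"log/slog"
)

type apiHandler struct{ logger *slog.Logger }

// package-level helper: no receiver, so it uses the process-wide default logger.
func respondError(err error) {
	slog.Error("error marshalling json response", "error", err)
}

// method: uses the injected logger and carries the request context.
func (h *apiHandler) handle(ctx context.Context, err error) {
	h.logger.ErrorContext(ctx, "internal server error in http handler", "error", err)
}

func main() {
	h := &apiHandler{logger: slog.Default()}
	respondError(errors.New("boom"))
	h.handle(context.Background(), errors.New("boom"))
}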
@@ -936,14 +937,14 @@ func (aH *APIHandler) metaForLinks(ctx context.Context, rule *ruletypes.Gettable
 			}
 			keys = model.GetLogFieldsV3(ctx, params, logFields)
 		} else {
-			zap.L().Error("failed to get log fields using empty keys; the link might not work as expected", zap.Error(apiErr))
+			aH.logger.ErrorContext(ctx, "failed to get log fields using empty keys", "error", apiErr)
 		}
 	} else if rule.AlertType == ruletypes.AlertTypeTraces {
 		traceFields, err := aH.reader.GetSpanAttributeKeysByNames(ctx, logsv3.GetFieldNames(rule.PostableRule.RuleCondition.CompositeQuery))
 		if err == nil {
 			keys = traceFields
 		} else {
-			zap.L().Error("failed to get span attributes using empty keys; the link might not work as expected", zap.Error(err))
+			aH.logger.ErrorContext(ctx, "failed to get span attributes using empty keys", "error", err)
 		}
 	}
@@ -1276,14 +1277,14 @@ func (aH *APIHandler) List(rw http.ResponseWriter, r *http.Request) {
 	installedIntegrationDashboards, apiErr := aH.IntegrationsController.GetDashboardsForInstalledIntegrations(ctx, orgID)
 	if apiErr != nil {
-		zap.L().Error("failed to get dashboards for installed integrations", zap.Error(apiErr))
+		aH.logger.ErrorContext(ctx, "failed to get dashboards for installed integrations", "error", apiErr)
 	} else {
 		dashboards = append(dashboards, installedIntegrationDashboards...)
 	}
 	cloudIntegrationDashboards, apiErr := aH.CloudIntegrationsController.AvailableDashboards(ctx, orgID)
 	if apiErr != nil {
-		zap.L().Error("failed to get dashboards for cloud integrations", zap.Error(apiErr))
+		aH.logger.ErrorContext(ctx, "failed to get dashboards for cloud integrations", "error", apiErr)
 	} else {
 		dashboards = append(dashboards, cloudIntegrationDashboards...)
 	}
@@ -1325,7 +1326,7 @@ func (aH *APIHandler) testRule(w http.ResponseWriter, r *http.Request) {
 	defer r.Body.Close()
 	body, err := io.ReadAll(r.Body)
 	if err != nil {
-		zap.L().Error("Error in getting req body in test rule API", zap.Error(err))
+		aH.logger.ErrorContext(r.Context(), "error reading request body for test rule", "error", err)
 		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
 		return
 	}
@@ -1377,7 +1378,7 @@ func (aH *APIHandler) patchRule(w http.ResponseWriter, r *http.Request) {
 	defer r.Body.Close()
 	body, err := io.ReadAll(r.Body)
 	if err != nil {
-		zap.L().Error("error in getting req body of patch rule API\n", zap.Error(err))
+		aH.logger.ErrorContext(r.Context(), "error reading request body for patch rule", "error", err)
 		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
 		return
 	}
@@ -1407,7 +1408,7 @@ func (aH *APIHandler) editRule(w http.ResponseWriter, r *http.Request) {
 	defer r.Body.Close()
 	body, err := io.ReadAll(r.Body)
 	if err != nil {
-		zap.L().Error("error in getting req body of edit rule API", zap.Error(err))
+		aH.logger.ErrorContext(r.Context(), "error reading request body for edit rule", "error", err)
 		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
 		return
 	}
@@ -1432,7 +1433,7 @@ func (aH *APIHandler) createRule(w http.ResponseWriter, r *http.Request) {
 	defer r.Body.Close()
 	body, err := io.ReadAll(r.Body)
 	if err != nil {
-		zap.L().Error("Error in getting req body for create rule API", zap.Error(err))
+		aH.logger.ErrorContext(r.Context(), "error reading request body for create rule", "error", err)
 		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
 		return
 	}
@@ -1456,7 +1457,7 @@ func (aH *APIHandler) queryRangeMetrics(w http.ResponseWriter, r *http.Request)
 		return
 	}
-	// zap.L().Info(query, apiError)
+	// TODO: add structured logging for query and apiError if needed
 	ctx := r.Context()
 	if to := r.FormValue("timeout"); to != "" {
@@ -1478,7 +1479,7 @@ func (aH *APIHandler) queryRangeMetrics(w http.ResponseWriter, r *http.Request)
 	}
 	if res.Err != nil {
-		zap.L().Error("error in query range metrics", zap.Error(res.Err))
+		aH.logger.ErrorContext(r.Context(), "error in query range metrics", "error", res.Err)
 	}
 	if res.Err != nil {
@@ -1511,7 +1512,7 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) {
 		return
 	}
-	// zap.L().Info(query, apiError)
+	// TODO: add structured logging for query and apiError if needed
 	ctx := r.Context()
 	if to := r.FormValue("timeout"); to != "" {
@@ -1533,7 +1534,7 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) {
 	}
 	if res.Err != nil {
-		zap.L().Error("error in query range metrics", zap.Error(res.Err))
+		aH.logger.ErrorContext(r.Context(), "error in query range metrics", "error", res.Err)
 	}
 	if res.Err != nil {
@@ -1636,7 +1637,7 @@ func (aH *APIHandler) getServicesTopLevelOps(w http.ResponseWriter, r *http.Requ
 	var params topLevelOpsParams
 	err := json.NewDecoder(r.Body).Decode(&params)
 	if err != nil {
-		zap.L().Error("Error in getting req body for get top operations API", zap.Error(err))
+		aH.logger.ErrorContext(r.Context(), "error reading request body for get top operations", "error", err)
 	}
 	if params.Service != "" {
@@ -2058,7 +2059,7 @@ func (aH *APIHandler) HandleError(w http.ResponseWriter, err error, statusCode i
 		return false
 	}
 	if statusCode == http.StatusInternalServerError {
-		zap.L().Error("HTTP handler, Internal Server Error", zap.Error(err))
+		aH.logger.Error("internal server error in http handler", "error", err)
 	}
 	structuredResp := structuredResponse{
 		Errors: []structuredError{
@@ -2152,7 +2153,7 @@ func (aH *APIHandler) onboardProducers(
 ) {
 	messagingQueue, apiErr := ParseKafkaQueueBody(r)
 	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2160,7 +2161,7 @@ func (aH *APIHandler) onboardProducers(
 	chq, err := kafka.BuildClickHouseQuery(messagingQueue, kafka.KafkaQueue, "onboard_producers")
 	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build clickhouse query for onboard producers", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2254,7 +2255,7 @@ func (aH *APIHandler) onboardConsumers(
 ) {
 	messagingQueue, apiErr := ParseKafkaQueueBody(r)
 	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2262,7 +2263,7 @@ func (aH *APIHandler) onboardConsumers(
 	chq, err := kafka.BuildClickHouseQuery(messagingQueue, kafka.KafkaQueue, "onboard_consumers")
 	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build clickhouse query for onboard consumers", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2401,7 +2402,7 @@ func (aH *APIHandler) onboardKafka(w http.ResponseWriter, r *http.Request) {
 	messagingQueue, apiErr := ParseKafkaQueueBody(r)
 	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2409,7 +2410,7 @@ func (aH *APIHandler) onboardKafka(w http.ResponseWriter, r *http.Request) {
 	queryRangeParams, err := kafka.BuildBuilderQueriesKafkaOnboarding(messagingQueue)
 	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build kafka onboarding queries", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2511,19 +2512,19 @@ func (aH *APIHandler) getNetworkData(w http.ResponseWriter, r *http.Request) {
 	messagingQueue, apiErr := ParseKafkaQueueBody(r)
 	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
 		RespondError(w, apiErr, nil)
 		return
 	}
 	queryRangeParams, err := kafka.BuildQRParamsWithCache(messagingQueue, "throughput", attributeCache)
 	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for throughput", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
 	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2562,12 +2563,12 @@ func (aH *APIHandler) getNetworkData(w http.ResponseWriter, r *http.Request) {
 	queryRangeParams, err = kafka.BuildQRParamsWithCache(messagingQueue, "fetch-latency", attributeCache)
 	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for fetch latency", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
 	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for fetch latency", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2622,7 +2623,7 @@ func (aH *APIHandler) getProducerData(w http.ResponseWriter, r *http.Request) {
 	messagingQueue, apiErr := ParseKafkaQueueBody(r)
 	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2636,13 +2637,13 @@ func (aH *APIHandler) getProducerData(w http.ResponseWriter, r *http.Request) {
 	queryRangeParams, err := kafka.BuildQueryRangeParams(messagingQueue, "producer", kafkaSpanEval)
 	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for producer", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
 	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for producer", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2679,7 +2680,7 @@ func (aH *APIHandler) getConsumerData(w http.ResponseWriter, r *http.Request) {
 	messagingQueue, apiErr := ParseKafkaQueueBody(r)
 	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2689,13 +2690,13 @@ func (aH *APIHandler) getConsumerData(w http.ResponseWriter, r *http.Request) {
 	queryRangeParams, err := kafka.BuildQueryRangeParams(messagingQueue, "consumer", kafkaSpanEval)
 	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for consumer", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
 	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for consumer", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2737,7 +2738,7 @@ func (aH *APIHandler) getPartitionOverviewLatencyData(w http.ResponseWriter, r *
 	messagingQueue, apiErr := ParseKafkaQueueBody(r)
 	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2747,13 +2748,13 @@ func (aH *APIHandler) getPartitionOverviewLatencyData(w http.ResponseWriter, r *
 	queryRangeParams, err := kafka.BuildQueryRangeParams(messagingQueue, "producer-topic-throughput", kafkaSpanEval)
 	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for producer topic throughput", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
 	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for producer topic throughput", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2795,7 +2796,7 @@ func (aH *APIHandler) getConsumerPartitionLatencyData(w http.ResponseWriter, r *
 	messagingQueue, apiErr := ParseKafkaQueueBody(r)
 	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2805,13 +2806,13 @@ func (aH *APIHandler) getConsumerPartitionLatencyData(w http.ResponseWriter, r *
 	queryRangeParams, err := kafka.BuildQueryRangeParams(messagingQueue, "consumer_partition_latency", kafkaSpanEval)
 	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for consumer partition latency", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
 	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for consumer partition latency", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2855,7 +2856,7 @@ func (aH *APIHandler) getProducerThroughputOverview(w http.ResponseWriter, r *ht
 	messagingQueue, apiErr := ParseKafkaQueueBody(r)
 	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2866,13 +2867,13 @@ func (aH *APIHandler) getProducerThroughputOverview(w http.ResponseWriter, r *ht
 	producerQueryRangeParams, err := kafka.BuildQRParamsWithCache(messagingQueue, "producer-throughput-overview", attributeCache)
 	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for producer throughput overview", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
 	if err := validateQueryRangeParamsV3(producerQueryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for producer throughput overview", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2908,12 +2909,12 @@ func (aH *APIHandler) getProducerThroughputOverview(w http.ResponseWriter, r *ht
 	queryRangeParams, err := kafka.BuildQRParamsWithCache(messagingQueue, "producer-throughput-overview-byte-rate", attributeCache)
 	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for producer throughput byte rate", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
 	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for producer throughput byte rate", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2971,7 +2972,7 @@ func (aH *APIHandler) getProducerThroughputDetails(w http.ResponseWriter, r *htt
 	messagingQueue, apiErr := ParseKafkaQueueBody(r)
 	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -2981,13 +2982,13 @@ func (aH *APIHandler) getProducerThroughputDetails(w http.ResponseWriter, r *htt
 	queryRangeParams, err := kafka.BuildQueryRangeParams(messagingQueue, "producer-throughput-details", kafkaSpanEval)
 	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for producer throughput details", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
 	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for producer throughput details", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -3029,7 +3030,7 @@ func (aH *APIHandler) getConsumerThroughputOverview(w http.ResponseWriter, r *ht
 	messagingQueue, apiErr := ParseKafkaQueueBody(r)
 	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -3039,13 +3040,13 @@ func (aH *APIHandler) getConsumerThroughputOverview(w http.ResponseWriter, r *ht
 	queryRangeParams, err := kafka.BuildQueryRangeParams(messagingQueue, "consumer-throughput-overview", kafkaSpanEval)
 	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for consumer throughput overview", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
 	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for consumer throughput overview", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -3087,7 +3088,7 @@ func (aH *APIHandler) getConsumerThroughputDetails(w http.ResponseWriter, r *htt
 	messagingQueue, apiErr := ParseKafkaQueueBody(r)
 	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -3097,13 +3098,13 @@ func (aH *APIHandler) getConsumerThroughputDetails(w http.ResponseWriter, r *htt
 	queryRangeParams, err := kafka.BuildQueryRangeParams(messagingQueue, "consumer-throughput-details", kafkaSpanEval)
 	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for consumer throughput details", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
 	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for consumer throughput details", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -3148,7 +3149,7 @@ func (aH *APIHandler) getProducerConsumerEval(w http.ResponseWriter, r *http.Req
 	messagingQueue, apiErr := ParseKafkaQueueBody(r)
 	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse kafka queue body", "error", apiErr.Err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -3158,7 +3159,7 @@ func (aH *APIHandler) getProducerConsumerEval(w http.ResponseWriter, r *http.Req
 	queryRangeParams, err := kafka.BuildQueryRangeParams(messagingQueue, "producer-consumer-eval", kafkaSpanEval)
 	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build query range params for producer consumer eval", "error", err)
 		RespondError(w, &model.ApiError{
 			Typ: model.ErrorBadData,
 			Err: err,
@@ -3167,7 +3168,7 @@ func (aH *APIHandler) getProducerConsumerEval(w http.ResponseWriter, r *http.Req
 	}
 	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to validate query range params for producer consumer eval", "error", err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -4255,7 +4256,7 @@ func (aH *APIHandler) CreateLogsPipeline(w http.ResponseWriter, r *http.Request)
 	postable []pipelinetypes.PostablePipeline,
 ) (*logparsingpipeline.PipelinesResponse, error) {
 	if len(postable) == 0 {
-		zap.L().Warn("found no pipelines in the http request, this will delete all the pipelines")
+		aH.logger.WarnContext(r.Context(), "found no pipelines in the http request, this will delete all the pipelines")
 	}
 	err := aH.LogsParsingPipelineController.ValidatePipelines(ctx, postable)
@@ -4453,7 +4454,7 @@ func (aH *APIHandler) QueryRangeV3Format(w http.ResponseWriter, r *http.Request)
 	queryRangeParams, apiErrorObj := ParseQueryRangeParams(r)
 	if apiErrorObj != nil {
-		zap.L().Error(apiErrorObj.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "error parsing query range params", "error", apiErrorObj.Err)
 		RespondError(w, apiErrorObj, nil)
 		return
 	}
@@ -4515,13 +4516,13 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
 		// check if traceID is used as filter (with equal/similar operator) in traces query if yes add timestamp filter to queryRange params
 		isUsed, traceIDs := tracesV3.TraceIdFilterUsedWithEqual(queryRangeParams)
 		if isUsed && len(traceIDs) > 0 {
-			zap.L().Debug("traceID used as filter in traces query")
+			aH.logger.DebugContext(ctx, "trace_id used as filter in traces query")
 			// query signoz_spans table with traceID to get min and max timestamp
 			min, max, err := aH.reader.GetMinAndMaxTimestampForTraceID(ctx, traceIDs)
 			if err == nil {
 				// add timestamp filter to queryRange params
 				tracesV3.AddTimestampFilters(min, max, queryRangeParams)
-				zap.L().Debug("post adding timestamp filter in traces query", zap.Any("queryRangeParams", queryRangeParams))
+				aH.logger.DebugContext(ctx, "post adding timestamp filter in traces query", "query_range_params", queryRangeParams)
 			}
 		}
 	}
@@ -4532,9 +4533,8 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
 		onQueryFinished, apiErr := aH.reader.ReportQueryStartForProgressTracking(queryIdHeader)
 		if apiErr != nil {
-			zap.L().Error(
-				"couldn't report query start for progress tracking",
-				zap.String("queryId", queryIdHeader), zap.Error(apiErr),
+			aH.logger.ErrorContext(ctx, "failed to report query start for progress tracking",
+				"query_id", queryIdHeader, "error", apiErr,
 			)
 		} else {
@@ -4709,7 +4709,7 @@ func (aH *APIHandler) QueryRangeV3(w http.ResponseWriter, r *http.Request) {
 	queryRangeParams, apiErrorObj := ParseQueryRangeParams(r)
 	if apiErrorObj != nil {
-		zap.L().Error("error parsing metric query range params", zap.Error(apiErrorObj.Err))
+		aH.logger.ErrorContext(r.Context(), "error parsing metric query range params", "error", apiErrorObj.Err)
 		RespondError(w, apiErrorObj, nil)
 		return
 	}
@@ -4717,7 +4717,7 @@ func (aH *APIHandler) QueryRangeV3(w http.ResponseWriter, r *http.Request) {
 	// add temporality for each metric
 	temporalityErr := aH.PopulateTemporality(r.Context(), orgID, queryRangeParams)
 	if temporalityErr != nil {
-		zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr))
+		aH.logger.ErrorContext(r.Context(), "error adding temporality for metrics", "error", temporalityErr)
 		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
 		return
 	}
@@ -4761,9 +4761,8 @@ func (aH *APIHandler) GetQueryProgressUpdates(w http.ResponseWriter, r *http.Req
 	progressCh, unsubscribe, apiErr := aH.reader.SubscribeToQueryProgress(queryId)
 	if apiErr != nil {
 		// Shouldn't happen unless query progress requested after query finished
-		zap.L().Warn(
-			"couldn't subscribe to query progress",
-			zap.String("queryId", queryId), zap.Any("error", apiErr),
+		aH.logger.WarnContext(r.Context(), "failed to subscribe to query progress",
+			"query_id", queryId, "error", apiErr,
 		)
 		return
 	}
@@ -4772,25 +4771,22 @@ func (aH *APIHandler) GetQueryProgressUpdates(w http.ResponseWriter, r *http.Req
 	for queryProgress := range progressCh {
 		msg, err := json.Marshal(queryProgress)
 		if err != nil {
-			zap.L().Error(
-				"failed to serialize progress message",
-				zap.String("queryId", queryId), zap.Any("progress", queryProgress), zap.Error(err),
+			aH.logger.ErrorContext(r.Context(), "failed to serialize progress message",
+				"query_id", queryId, "progress", queryProgress, "error", err,
 			)
 			continue
 		}
 		err = c.WriteMessage(websocket.TextMessage, msg)
 		if err != nil {
-			zap.L().Error(
-				"failed to write progress msg to websocket",
-				zap.String("queryId", queryId), zap.String("msg", string(msg)), zap.Error(err),
+			aH.logger.ErrorContext(r.Context(), "failed to write progress message to websocket",
+				"query_id", queryId, "msg", string(msg), "error", err,
 			)
 			break
 		} else {
-			zap.L().Debug(
-				"wrote progress msg to websocket",
-				zap.String("queryId", queryId), zap.String("msg", string(msg)), zap.Error(err),
+			aH.logger.DebugContext(r.Context(), "wrote progress message to websocket",
+				"query_id", queryId, "msg", string(msg),
 			)
 		}
 	}
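The loop above pushes each serialized progress update over the websocket connection. A rough client-side sketch for consuming those messages with gorilla/websocket; the URL is a placeholder (not the real route) and the JSON field names are assumptions, since only part of model.QueryProgress appears in these hunks:

package main

import (
	"encoding/json"
	"log/slog"

	"github.com/gorilla/websocket"
)

// queryProgress is an assumed, partial mirror of model.QueryProgress.
type queryProgress struct {
	ReadRows  uint64 `json:"read_rows"`
	ReadBytes uint64 `json:"read_bytes"`
}

func main() {
	wsURL := "ws://localhost:8080/query-progress" // placeholder, not the actual endpoint
	conn, _, err := websocket.DefaultDialer.Dial(wsURL, nil)
	if err != nil {
		slog.Error("failed to dial websocket", "error", err)
		return
	}
	defer conn.Close()

	for {
		_, msg, err := conn.ReadMessage()
		if err != nil {
			slog.Error("websocket read failed", "error", err)
			return
		}
		var p queryProgress
		if err := json.Unmarshal(msg, &p); err != nil {
			slog.Error("failed to decode progress message", "error", err)
			continue
		}
		slog.Info("query progress", "read_rows", p.ReadRows, "read_bytes", p.ReadBytes)
	}
}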
@@ -4874,13 +4870,13 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
 		// check if traceID is used as filter (with equal/similar operator) in traces query if yes add timestamp filter to queryRange params
 		isUsed, traceIDs := tracesV3.TraceIdFilterUsedWithEqual(queryRangeParams)
 		if isUsed && len(traceIDs) > 0 {
-			zap.L().Debug("traceID used as filter in traces query")
+			aH.logger.DebugContext(ctx, "trace_id used as filter in traces query")
 			// query signoz_spans table with traceID to get min and max timestamp
 			min, max, err := aH.reader.GetMinAndMaxTimestampForTraceID(ctx, traceIDs)
 			if err == nil {
 				// add timestamp filter to queryRange params
 				tracesV3.AddTimestampFilters(min, max, queryRangeParams)
-				zap.L().Debug("post adding timestamp filter in traces query", zap.Any("queryRangeParams", queryRangeParams))
+				aH.logger.DebugContext(ctx, "post adding timestamp filter in traces query", "query_range_params", queryRangeParams)
 			}
 		}
 	}
@@ -4932,7 +4928,7 @@ func (aH *APIHandler) QueryRangeV4(w http.ResponseWriter, r *http.Request) {
 	queryRangeParams, apiErrorObj := ParseQueryRangeParams(r)
 	if apiErrorObj != nil {
-		zap.L().Error("error parsing metric query range params", zap.Error(apiErrorObj.Err))
+		aH.logger.ErrorContext(r.Context(), "error parsing metric query range params", "error", apiErrorObj.Err)
 		RespondError(w, apiErrorObj, nil)
 		return
 	}
@@ -4941,7 +4937,7 @@ func (aH *APIHandler) QueryRangeV4(w http.ResponseWriter, r *http.Request) {
 	// add temporality for each metric
 	temporalityErr := aH.PopulateTemporality(r.Context(), orgID, queryRangeParams)
 	if temporalityErr != nil {
-		zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr))
+		aH.logger.ErrorContext(r.Context(), "error adding temporality for metrics", "error", temporalityErr)
 		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
 		return
 	}
@@ -4990,7 +4986,7 @@ func (aH *APIHandler) getQueueOverview(w http.ResponseWriter, r *http.Request) {
 	queueListRequest, apiErr := ParseQueueBody(r)
 	if apiErr != nil {
-		zap.L().Error(apiErr.Err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to parse queue body", "error", apiErr.Err)
 		RespondError(w, apiErr, nil)
 		return
 	}
@@ -4998,7 +4994,7 @@ func (aH *APIHandler) getQueueOverview(w http.ResponseWriter, r *http.Request) {
 	chq, err := queues2.BuildOverviewQuery(queueListRequest)
 	if err != nil {
-		zap.L().Error(err.Error())
+		aH.logger.ErrorContext(r.Context(), "failed to build queue overview query", "error", err)
 		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", err)}, nil)
 		return
 	}
@@ -5029,7 +5025,7 @@ func (aH *APIHandler) getDomainList(w http.ResponseWriter, r *http.Request) {
 	// Parse the request body to get third-party query parameters
 	thirdPartyQueryRequest, apiErr := ParseRequestBody(r)
 	if apiErr != nil {
-		zap.L().Error("Failed to parse request body", zap.Error(apiErr))
+		aH.logger.ErrorContext(r.Context(), "failed to parse request body", "error", apiErr)
 		render.Error(w, errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, apiErr.Error()))
 		return
 	}
@@ -5037,7 +5033,7 @@ func (aH *APIHandler) getDomainList(w http.ResponseWriter, r *http.Request) {
 	// Build the v5 query range request for domain listing
 	queryRangeRequest, err := thirdpartyapi.BuildDomainList(thirdPartyQueryRequest)
 	if err != nil {
-		zap.L().Error("Failed to build domain list query", zap.Error(err))
+		aH.logger.ErrorContext(r.Context(), "failed to build domain list query", "error", err)
 		apiErrObj := errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error())
 		render.Error(w, apiErrObj)
 		return
@@ -5050,7 +5046,7 @@ func (aH *APIHandler) getDomainList(w http.ResponseWriter, r *http.Request) {
 	// Execute the query using the v5 querier
 	result, err := aH.Signoz.Querier.QueryRange(ctx, orgID, queryRangeRequest)
 	if err != nil {
-		zap.L().Error("Query execution failed", zap.Error(err))
+		aH.logger.ErrorContext(r.Context(), "query execution failed", "error", err)
 		apiErrObj := errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error())
 		render.Error(w, apiErrObj)
 		return
@@ -5089,7 +5085,7 @@ func (aH *APIHandler) getDomainInfo(w http.ResponseWriter, r *http.Request) {
 	// Parse the request body to get third-party query parameters
 	thirdPartyQueryRequest, apiErr := ParseRequestBody(r)
 	if apiErr != nil {
-		zap.L().Error("Failed to parse request body", zap.Error(apiErr))
+		aH.logger.ErrorContext(r.Context(), "failed to parse request body", "error", apiErr)
 		render.Error(w, errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, apiErr.Error()))
 		return
 	}
@@ -5097,7 +5093,7 @@ func (aH *APIHandler) getDomainInfo(w http.ResponseWriter, r *http.Request) {
 	// Build the v5 query range request for domain info
 	queryRangeRequest, err := thirdpartyapi.BuildDomainInfo(thirdPartyQueryRequest)
 	if err != nil {
-		zap.L().Error("Failed to build domain info query", zap.Error(err))
+		aH.logger.ErrorContext(r.Context(), "failed to build domain info query", "error", err)
 		apiErrObj := errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error())
 		render.Error(w, apiErrObj)
 		return
@@ -5110,7 +5106,7 @@ func (aH *APIHandler) getDomainInfo(w http.ResponseWriter, r *http.Request) {
 	// Execute the query using the v5 querier
 	result, err := aH.Signoz.Querier.QueryRange(ctx, orgID, queryRangeRequest)
 	if err != nil {
-		zap.L().Error("Query execution failed", zap.Error(err))
+		aH.logger.ErrorContext(r.Context(), "query execution failed", "error", err)
 		apiErrObj := errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error())
 		render.Error(w, apiErrObj)
 		return
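Many of the handler hunks above repeat the same pair: log the error with the request context, then call RespondError. A hypothetical refactor sketch (not part of this PR; logAndRespond, apiError, and respondError are invented stand-ins) showing how a small helper could keep each call site to one line:

package main

import (
	"errors"
	"log/slog"
	"net/http"
	"net/http/httptest"
)

// apiError is a simplified stand-in for model.ApiError.
type apiError struct{ Err error }

func respondError(w http.ResponseWriter, apiErr *apiError) {
	http.Error(w, apiErr.Err.Error(), http.StatusBadRequest)
}

// logAndRespond logs with the request context and then writes the error response.
func logAndRespond(w http.ResponseWriter, r *http.Request, logger *slog.Logger, msg string, apiErr *apiError) {
	logger.ErrorContext(r.Context(), msg, "error", apiErr.Err)
	respondError(w, apiErr)
}

func main() {
	rec := httptest.NewRecorder()
	req := httptest.NewRequest(http.MethodGet, "/", nil)
	logAndRespond(rec, req, slog.Default(), "failed to parse kafka queue body", &apiError{Err: errors.New("bad payload")})
}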


@@ -17,9 +17,10 @@ import (
 	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
 	"github.com/SigNoz/signoz/pkg/query-service/postprocess"
 	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
+	"log/slog"
 	"github.com/SigNoz/signoz/pkg/types/instrumentationtypes"
 	"github.com/SigNoz/signoz/pkg/valuer"
-	"go.uber.org/zap"
 	"golang.org/x/exp/maps"
 	"golang.org/x/exp/slices"
 )
@@ -427,7 +428,7 @@ func (h *HostsRepo) GetHostList(ctx context.Context, orgID valuer.UUID, req mode
 	step := int64(math.Max(float64(common.MinAllowedStepInterval(req.Start, req.End)), 60))
 	if step <= 0 {
-		zap.L().Error("step is less than or equal to 0", zap.Int64("step", step))
+		slog.ErrorContext(ctx, "step is less than or equal to 0", "step", step)
 		return resp, errors.New("step is less than or equal to 0")
 	}


@@ -8,10 +8,11 @@ import (
 	"gopkg.in/yaml.v3"
+	"log/slog"
 	"github.com/SigNoz/signoz/pkg/errors"
 	"github.com/SigNoz/signoz/pkg/query-service/constants"
 	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
-	"go.uber.org/zap"
 )
 var lockLogsPipelineSpec sync.RWMutex
@@ -154,14 +155,14 @@ func buildCollectorPipelineProcessorsList(
 func checkDuplicateString(pipeline []string) bool {
 	exists := make(map[string]bool, len(pipeline))
-	zap.L().Debug("checking duplicate processors in the pipeline:", zap.Any("pipeline", pipeline))
+	slog.Debug("checking duplicate processors in the pipeline", "pipeline", pipeline)
 	for _, processor := range pipeline {
 		name := processor
 		if _, ok := exists[name]; ok {
-			zap.L().Error(
+			slog.Error(
 				"duplicate processor name detected in generated collector config for log pipelines",
-				zap.String("processor", processor),
-				zap.Any("pipeline", pipeline),
+				"processor", processor,
+				"pipeline", pipeline,
 			)
 			return true
 		}
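The replacement above uses slog's alternating key/value arguments. A small sketch of an alternative style (a suggestion, not what the PR does) using typed attributes, which makes mis-paired keys harder; the pipeline values are made-up examples:

package main

import "log/slog"

func main() {
	pipeline := []string{"logstransform/pipeline_a", "logstransform/pipeline_a"}
	// slog.String and slog.Any build strongly typed attributes instead of bare key/value pairs.
	slog.Error(
		"duplicate processor name detected in generated collector config for log pipelines",
		slog.String("processor", pipeline[1]),
		slog.Any("pipeline", pipeline),
	)
}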


@@ -21,7 +21,7 @@ import (
 	"github.com/SigNoz/signoz/pkg/valuer"
 	"github.com/google/uuid"
-	"go.uber.org/zap"
+	"log/slog"
 )
 var (
@@ -175,7 +175,7 @@ func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
 	if version >= 0 {
 		savedPipelines, err := ic.getPipelinesByVersion(ctx, orgID.String(), version)
 		if err != nil {
-			zap.L().Error("failed to get pipelines for version", zap.Int("version", version), zap.Error(err))
+			slog.ErrorContext(ctx, "failed to get pipelines for version", "version", version, "error", err)
 			return nil, err
 		}
 		result = savedPipelines
@@ -227,7 +227,7 @@ func (ic *LogParsingPipelineController) GetPipelinesByVersion(
 ) (*PipelinesResponse, error) {
 	pipelines, err := ic.getEffectivePipelinesByVersion(ctx, orgId, version)
 	if err != nil {
-		zap.L().Error("failed to get pipelines for version", zap.Int("version", version), zap.Error(err))
+		slog.ErrorContext(ctx, "failed to get pipelines for version", "version", version, "error", err)
 		return nil, err
 	}
@@ -235,7 +235,7 @@ func (ic *LogParsingPipelineController) GetPipelinesByVersion(
 	if version >= 0 {
 		cv, err := agentConf.GetConfigVersion(ctx, orgId, opamptypes.ElementTypeLogPipelines, version)
 		if err != nil {
-			zap.L().Error("failed to get config for version", zap.Int("version", version), zap.Error(err))
+			slog.ErrorContext(ctx, "failed to get config for version", "version", version, "error", err)
 			return nil, err
 		}
 		configVersion = cv


@@ -6,6 +6,8 @@ import (
 	"fmt"
 	"time"
+	"log/slog"
 	"github.com/SigNoz/signoz/pkg/errors"
 	"github.com/SigNoz/signoz/pkg/query-service/model"
 	"github.com/SigNoz/signoz/pkg/sqlstore"
@@ -13,7 +15,6 @@ import (
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
 	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
 	"github.com/SigNoz/signoz/pkg/valuer"
-	"go.uber.org/zap"
 )
 // Repo handles DDL and DML ops on ingestion pipeline
@@ -80,7 +81,7 @@ func (r *Repo) insertPipeline(
 		Model(&insertRow.StoreablePipeline).
 		Exec(ctx)
 	if err != nil {
-		zap.L().Error("error in inserting pipeline data", zap.Error(err))
+		slog.ErrorContext(ctx, "error in inserting pipeline data", "error", err)
 		return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to insert pipeline")
 	}
@@ -136,12 +137,12 @@ func (r *Repo) GetPipeline(
 		Where("org_id = ?", orgID).
 		Scan(ctx)
 	if err != nil {
-		zap.L().Error("failed to get ingestion pipeline from db", zap.Error(err))
+		slog.ErrorContext(ctx, "failed to get ingestion pipeline from db", "error", err)
 		return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to get ingestion pipeline from db")
 	}
 	if len(storablePipelines) == 0 {
-		zap.L().Warn("No row found for ingestion pipeline id", zap.String("id", id))
+		slog.WarnContext(ctx, "no row found for ingestion pipeline id", "id", id)
 		return nil, errors.NewNotFoundf(errors.CodeNotFound, "no row found for ingestion pipeline id %v", id)
 	}
@@ -149,11 +150,11 @@ func (r *Repo) GetPipeline(
 	gettablePipeline := pipelinetypes.GettablePipeline{}
 	gettablePipeline.StoreablePipeline = storablePipelines[0]
 	if err := gettablePipeline.ParseRawConfig(); err != nil {
-		zap.L().Error("invalid pipeline config found", zap.String("id", id), zap.Error(err))
+		slog.ErrorContext(ctx, "invalid pipeline config found", "id", id, "error", err)
 		return nil, err
 	}
 	if err := gettablePipeline.ParseFilter(); err != nil {
-		zap.L().Error("invalid pipeline filter found", zap.String("id", id), zap.Error(err))
+		slog.ErrorContext(ctx, "invalid pipeline filter found", "id", id, "error", err)
 		return nil, err
 	}
 	return &gettablePipeline, nil


@@ -2,12 +2,12 @@ package metrics
 import (
 	"fmt"
+	"log/slog"
 	"reflect"
 	"strconv"
 	"strings"
 	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
-	"go.uber.org/zap"
 )
 func AddMetricValueFilter(mq *v3.BuilderQuery) *v3.MetricValueFilter {
@@ -69,7 +69,7 @@ func AddMetricValueFilter(mq *v3.BuilderQuery) *v3.MetricValueFilter {
 		case string:
 			numericValue, err := strconv.ParseFloat(v, 64)
 			if err != nil {
-				zap.L().Warn("invalid type for metric value filter, ignoring", zap.Any("type", reflect.TypeOf(v)), zap.String("value", v))
+				slog.Warn("invalid type for metric value filter, ignoring", "type", reflect.TypeOf(v), "value", v)
 				continue
 			}
 			metricValueFilter = &v3.MetricValueFilter{
@@ -111,11 +111,11 @@ func FormattedValue(v interface{}) string {
 		case int, float32, float64, bool:
 			return strings.Join(strings.Fields(fmt.Sprint(x)), ",")
 		default:
-			zap.L().Error("invalid type for formatted value", zap.Any("type", reflect.TypeOf(x[0])))
+			slog.Error("invalid type for formatted value", "type", reflect.TypeOf(x[0]))
 			return ""
 		}
 	default:
-		zap.L().Error("invalid type for formatted value", zap.Any("type", reflect.TypeOf(x)))
+		slog.Error("invalid type for formatted value", "type", reflect.TypeOf(x))
 		return ""
 	}
 }
@@ -144,11 +144,11 @@ func PromFormattedValue(v interface{}) string {
 			}
 			return strings.Join(str, "|")
 		default:
-			zap.L().Error("invalid type for prom formatted value", zap.Any("type", reflect.TypeOf(x[0])))
+			slog.Error("invalid type for prom formatted value", "type", reflect.TypeOf(x[0]))
 			return ""
 		}
 	default:
-		zap.L().Error("invalid type for prom formatted value", zap.Any("type", reflect.TypeOf(x)))
+		slog.Error("invalid type for prom formatted value", "type", reflect.TypeOf(x))
 		return ""
 	}
 }


@@ -8,7 +8,7 @@ import (
"strings"
"time"
-"go.uber.org/zap"
+"log/slog"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
@@ -173,14 +173,14 @@ func (receiver *SummaryService) GetMetricsSummary(ctx context.Context, orgID val
if data != nil {
jsonData, err := json.Marshal(data)
if err != nil {
-zap.L().Error("Error marshalling data:", zap.Error(err))
+slog.Error("error marshalling data", "error", err)
return &model.ApiError{Typ: "MarshallingErr", Err: err}
}
var dashboards map[string][]metrics_explorer.Dashboard
err = json.Unmarshal(jsonData, &dashboards)
if err != nil {
-zap.L().Error("Error unmarshalling data:", zap.Error(err))
+slog.Error("error unmarshalling data", "error", err)
return &model.ApiError{Typ: "UnMarshallingErr", Err: err}
}
if _, ok := dashboards[metricName]; ok {
@@ -264,7 +264,7 @@ func (receiver *SummaryService) GetRelatedMetrics(ctx context.Context, params *m
if err != nil {
// If we hit a deadline exceeded error, proceed with only name similarity
if errors.Is(err.Err, context.DeadlineExceeded) {
-zap.L().Warn("Attribute similarity calculation timed out, proceeding with name similarity only")
+slog.Warn("attribute similarity calculation timed out, proceeding with name similarity only")
attrSimilarityScores = make(map[string]metrics_explorer.RelatedMetricsScore)
} else {
return nil, err
@@ -350,12 +350,12 @@ func (receiver *SummaryService) GetRelatedMetrics(ctx context.Context, params *m
if names != nil {
jsonData, err := json.Marshal(names)
if err != nil {
-zap.L().Error("Error marshalling dashboard data", zap.Error(err))
+slog.Error("error marshalling dashboard data", "error", err)
return &model.ApiError{Typ: "MarshallingErr", Err: err}
}
err = json.Unmarshal(jsonData, &dashboardsRelatedData)
if err != nil {
-zap.L().Error("Error unmarshalling dashboard data", zap.Error(err))
+slog.Error("error unmarshalling dashboard data", "error", err)
return &model.ApiError{Typ: "UnMarshallingErr", Err: err}
}
}


@@ -3,6 +3,7 @@ package opamp
import (
"context"
"crypto/sha256"
+"log/slog"
"github.com/SigNoz/signoz/pkg/errors"
model "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
@@ -10,7 +11,6 @@ import (
"github.com/knadh/koanf/parsers/yaml"
"github.com/open-telemetry/opamp-go/protobufs"
"go.opentelemetry.io/collector/confmap"
-"go.uber.org/zap"
)
var (
@@ -29,10 +29,10 @@ func UpsertControlProcessors(ctx context.Context, signal string,
// AddToTracePipeline() or RemoveFromTracesPipeline() prior to calling
// this method
-zap.L().Debug("initiating ingestion rules deployment config", zap.String("signal", signal), zap.Any("processors", processors))
+slog.Debug("initiating ingestion rules deployment config", "signal", signal, "processors", processors)
if signal != string(Metrics) && signal != string(Traces) {
-zap.L().Error("received invalid signal int UpsertControlProcessors", zap.String("signal", signal))
+slog.Error("received invalid signal in UpsertControlProcessors", "signal", signal)
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "signal not supported in ingestion rules: %s", signal)
}
@@ -46,14 +46,14 @@
}
if len(agents) > 1 && signal == string(Traces) {
-zap.L().Debug("found multiple agents. this feature is not supported for traces pipeline (sampling rules)")
+slog.Debug("found multiple agents, this feature is not supported for traces pipeline (sampling rules)")
return "", errors.NewInvalidInputf(CodeMultipleAgentsNotSupported, "multiple agents not supported in sampling rules")
}
hash := ""
for _, agent := range agents {
agenthash, err := addIngestionControlToAgent(agent, signal, processors, false)
if err != nil {
-zap.L().Error("failed to push ingestion rules config to agent", zap.String("agentID", agent.AgentID), zap.Error(err))
+slog.Error("failed to push ingestion rules config to agent", "agent_id", agent.AgentID, "error", err)
continue
}
@@ -82,7 +82,7 @@ func addIngestionControlToAgent(agent *model.Agent, signal string, processors ma
// add ingestion control spec
err = makeIngestionControlSpec(agentConf, Signal(signal), processors)
if err != nil {
-zap.L().Error("failed to prepare ingestion control processors for agent", zap.String("agentID", agent.AgentID), zap.Error(err))
+slog.Error("failed to prepare ingestion control processors for agent", "agent_id", agent.AgentID, "error", err)
return confHash, err
}
@@ -92,7 +92,7 @@
return confHash, err
}
-zap.L().Debug("sending new config", zap.String("config", string(configR)))
+slog.Debug("sending new config", "config", string(configR))
hash := sha256.New()
_, err = hash.Write(configR)
if err != nil {
@@ -133,7 +133,7 @@ func makeIngestionControlSpec(agentConf *confmap.Conf, signal Signal, processors
// merge tracesPipelinePlan with current pipeline
mergedPipeline, err := buildPipeline(signal, currentPipeline)
if err != nil {
-zap.L().Error("failed to build pipeline", zap.String("signal", string(signal)), zap.Error(err))
+slog.Error("failed to build pipeline", "signal", string(signal), "error", err)
return err
}


@@ -11,7 +11,6 @@ import (
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/opamptypes"
"github.com/SigNoz/signoz/pkg/valuer"
-"go.uber.org/zap"
"google.golang.org/protobuf/proto"
"github.com/open-telemetry/opamp-go/protobufs"
@@ -305,7 +304,7 @@ func (agent *Agent) processStatusUpdate(
func (agent *Agent) updateRemoteConfig(configProvider AgentConfigProvider) bool {
recommendedConfig, confId, err := configProvider.RecommendAgentConfig(agent.OrgID, []byte(agent.Config))
if err != nil {
-zap.L().Error("could not generate config recommendation for agent", zap.String("agentID", agent.AgentID), zap.Error(err))
+agent.logger.Error("could not generate config recommendation for agent", "agent_id", agent.AgentID, "error", err)
return false
}
@@ -322,7 +321,7 @@ func (agent *Agent) updateRemoteConfig(configProvider AgentConfigProvider) bool
if len(confId) < 1 {
// Should never happen. Handle gracefully if it does by some chance.
-zap.L().Error("config provider recommended a config with empty confId. Using content hash for configId")
+agent.logger.Error("config provider recommended a config with empty conf_id, using content hash for config_id")
hash := sha256.New()
for k, v := range cfg.Config.ConfigMap {


@@ -14,7 +14,6 @@ import (
"github.com/open-telemetry/opamp-go/protobufs"
"github.com/open-telemetry/opamp-go/server/types"
"github.com/pkg/errors"
-"go.uber.org/zap"
)
var AllAgents = Agents{
@@ -135,8 +134,8 @@ func (agents *Agents) RecommendLatestConfigToAll(
// Recommendation is same as current config
if string(newConfig) == agent.Config {
-zap.L().Info(
-"Recommended config same as current effective config for agent", zap.String("agentID", agent.AgentID),
+agents.logger.Info(
+"recommended config same as current effective config for agent", "agent_id", agent.AgentID,
)
return nil
}


@@ -2,6 +2,7 @@ package opamp
import (
"context"
+"log/slog"
"net/http"
"time"
@@ -11,8 +12,6 @@ import (
"github.com/open-telemetry/opamp-go/protobufs"
"github.com/open-telemetry/opamp-go/server"
"github.com/open-telemetry/opamp-go/server/types"
-"go.uber.org/zap"
)
var opAmpServer *Server
@@ -20,6 +19,7 @@ var opAmpServer *Server
type Server struct {
server server.OpAMPServer
agents *model.Agents
+logger *slog.Logger
agentConfigProvider AgentConfigProvider
@@ -43,6 +43,7 @@ func InitializeServer(
opAmpServer = &Server{
agents: agents,
agentConfigProvider: agentConfigProvider,
+logger: instrumentation.Logger(),
}
opAmpServer.server = server.New(wrappedLogger(instrumentation.Logger()))
return opAmpServer
@@ -70,8 +71,8 @@ func (srv *Server) Start(listener string) error {
unsubscribe := srv.agentConfigProvider.SubscribeToConfigUpdates(func() {
err := srv.agents.RecommendLatestConfigToAll(srv.agentConfigProvider)
if err != nil {
-zap.L().Error(
-"could not roll out latest config recommendation to connected agents", zap.Error(err),
+srv.logger.Error(
+"could not roll out latest config recommendation to connected agents", "error", err,
)
}
})
@@ -114,7 +115,7 @@ func (srv *Server) OnMessage(ctx context.Context, conn types.Connection, msg *pr
// agents sends the effective config when we processStatusUpdate.
agent, created, err := srv.agents.FindOrCreateAgent(agentID.String(), conn, orgID)
if err != nil {
-zap.L().Error("Failed to find or create agent", zap.String("agentID", agentID.String()), zap.Error(err))
+srv.logger.Error("failed to find or create agent", "agent_id", agentID.String(), "error", err)
// Return error response according to OpAMP protocol
return &protobufs.ServerToAgent{
@@ -134,10 +135,10 @@ func (srv *Server) OnMessage(ctx context.Context, conn types.Connection, msg *pr
if created {
agent.CanLB = model.ExtractLbFlag(msg.AgentDescription)
-zap.L().Debug(
-"New agent added", zap.Bool("canLb", agent.CanLB),
-zap.String("agentID", agent.AgentID),
-zap.Any("status", agent.Status),
+srv.logger.Debug(
+"new agent added", "can_lb", agent.CanLB,
+"agent_id", agent.AgentID,
+"status", agent.Status,
)
}
@@ -158,7 +159,7 @@ func Ready() bool {
return false
}
if opAmpServer.agents.Count() == 0 {
-zap.L().Warn("no agents available, all agent config requests will be rejected")
+slog.Warn("no agents available, all agent config requests will be rejected")
return false
}
return true
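
The OpAMP server hunks above switch from the global zap logger to a *slog.Logger field populated from the instrumentation provider. A rough sketch of that constructor-injection shape, assuming Go 1.21+; the NewServer signature and the JSON-handler fallback are illustrative assumptions, not the repository's actual wiring:

```go
package main

import (
	"log/slog"
	"os"
)

// Server mimics a long-lived component that keeps its own logger instead of
// calling a package-global one.
type Server struct {
	logger *slog.Logger
}

// NewServer takes the logger from the caller and falls back to a default so
// methods can use srv.logger without nil checks.
func NewServer(logger *slog.Logger) *Server {
	if logger == nil {
		logger = slog.New(slog.NewJSONHandler(os.Stdout, nil))
	}
	return &Server{logger: logger}
}

func (srv *Server) onAgentConnected(agentID string, canLB bool) {
	srv.logger.Debug("new agent added", "agent_id", agentID, "can_lb", canLB)
	srv.logger.Info("recommended config rolled out", "agent_id", agentID)
}

func main() {
	NewServer(nil).onAgentConnected("agent-1", true)
}
```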


@@ -2,9 +2,8 @@ package opamp
import (
"fmt"
+"log/slog"
"sync"
-"go.uber.org/zap"
)
var lockTracesPipelineSpec sync.RWMutex
@@ -89,7 +88,7 @@ func RemoveFromMetricsPipelineSpec(name string) {
func checkDuplicates(pipeline []interface{}) bool {
exists := make(map[string]bool, len(pipeline))
-zap.L().Debug("checking duplicate processors in the pipeline", zap.Any("pipeline", pipeline))
+slog.Debug("checking duplicate processors in the pipeline", "pipeline", pipeline)
for _, processor := range pipeline {
name := processor.(string)
if _, ok := exists[name]; ok {
@@ -149,7 +148,7 @@ func buildPipeline(signal Signal, current []interface{}) ([]interface{}, error)
currentPos := loc + inserts
// if disabled then remove from the pipeline
if !m.Enabled {
-zap.L().Debug("build_pipeline: found a disabled item, removing from pipeline at position", zap.Int("position", currentPos-1), zap.String("processor", m.Name))
+slog.Debug("build_pipeline: found a disabled item, removing from pipeline at position", "position", currentPos-1, "processor", m.Name)
if currentPos-1 <= 0 {
pipeline = pipeline[currentPos+1:]
} else {
@@ -170,10 +169,10 @@ func buildPipeline(signal Signal, current []interface{}) ([]interface{}, error)
// right after last matched processsor (e.g. insert filters after tail_sampling for existing list of [batch, tail_sampling])
if lastMatched <= 0 {
-zap.L().Debug("build_pipeline: found a new item to be inserted, inserting at position 0", zap.String("processor", m.Name))
+slog.Debug("build_pipeline: found a new item to be inserted, inserting at position 0", "processor", m.Name)
pipeline = append([]interface{}{m.Name}, pipeline[lastMatched+1:]...)
} else {
-zap.L().Debug("build_pipeline: found a new item to be inserted, inserting at position", zap.Int("position", lastMatched), zap.String("processor", m.Name))
+slog.Debug("build_pipeline: found a new item to be inserted, inserting at position", "position", lastMatched, "processor", m.Name)
prior := make([]interface{}, len(pipeline[:lastMatched]))
next := make([]interface{}, len(pipeline[lastMatched:]))
copy(prior, pipeline[:lastMatched])


@@ -19,10 +19,11 @@ import (
"github.com/SigNoz/govaluate"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations/messagingQueues/kafka"
queues2 "github.com/SigNoz/signoz/pkg/query-service/app/integrations/messagingQueues/queues"
+"log/slog"
"github.com/gorilla/mux"
promModel "github.com/prometheus/common/model"
"go.uber.org/multierr"
-"go.uber.org/zap"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/app/metrics"
@@ -740,9 +741,9 @@ func chTransformQuery(query string, variables map[string]interface{}) {
transformer := chVariables.NewQueryTransformer(query, varsForTransform)
transformedQuery, err := transformer.Transform()
if err != nil {
-zap.L().Warn("failed to transform clickhouse query", zap.String("query", query), zap.Error(err))
+slog.Warn("failed to transform clickhouse query", "query", query, "error", err)
}
-zap.L().Info("transformed clickhouse query", zap.String("transformedQuery", transformedQuery), zap.String("originalQuery", query))
+slog.Info("transformed clickhouse query", "transformed_query", transformedQuery, "original_query", query)
}
func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiError) {


@@ -15,7 +15,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/postprocess"
"github.com/SigNoz/signoz/pkg/query-service/querycache"
"github.com/SigNoz/signoz/pkg/valuer"
-"go.uber.org/zap"
)
func prepareLogsQuery(
@@ -97,7 +96,7 @@ func (q *querier) runBuilderQuery(
var query string
var err error
if _, ok := cacheKeys[queryName]; !ok || params.NoCache {
-zap.L().Info("skipping cache for logs query", zap.String("queryName", queryName), zap.Int64("start", start), zap.Int64("end", end), zap.Int64("step", builderQuery.StepInterval), zap.Bool("noCache", params.NoCache), zap.String("cacheKey", cacheKeys[queryName]))
+q.logger.InfoContext(ctx, "skipping cache for logs query", "query_name", queryName, "start", start, "end", end, "step", builderQuery.StepInterval, "no_cache", params.NoCache, "cache_key", cacheKeys[queryName])
query, err = prepareLogsQuery(ctx, start, end, builderQuery, params)
if err != nil {
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
@@ -109,7 +108,7 @@ func (q *querier) runBuilderQuery(
}
misses := q.queryCache.FindMissingTimeRanges(orgID, start, end, builderQuery.StepInterval, cacheKeys[queryName])
-zap.L().Info("cache misses for logs query", zap.Any("misses", misses))
+q.logger.InfoContext(ctx, "cache misses for logs query", "misses", misses)
missedSeries := make([]querycache.CachedSeriesData, 0)
filteredMissedSeries := make([]querycache.CachedSeriesData, 0)
for _, miss := range misses {
@@ -217,7 +216,7 @@ func (q *querier) runBuilderQuery(
// We are only caching the graph panel queries. A non-existant cache key means that the query is not cached.
// If the query is not cached, we execute the query and return the result without caching it.
if _, ok := cacheKeys[queryName]; !ok || params.NoCache {
-zap.L().Info("skipping cache for metrics query", zap.String("queryName", queryName), zap.Int64("start", start), zap.Int64("end", end), zap.Int64("step", builderQuery.StepInterval), zap.Bool("noCache", params.NoCache), zap.String("cacheKey", cacheKeys[queryName]))
+q.logger.InfoContext(ctx, "skipping cache for metrics query", "query_name", queryName, "start", start, "end", end, "step", builderQuery.StepInterval, "no_cache", params.NoCache, "cache_key", cacheKeys[queryName])
query, err := metricsV3.PrepareMetricQuery(start, end, params.CompositeQuery.QueryType, params.CompositeQuery.PanelType, builderQuery, metricsV3.Options{})
if err != nil {
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
@@ -230,7 +229,7 @@ func (q *querier) runBuilderQuery(
cacheKey := cacheKeys[queryName]
misses := q.queryCache.FindMissingTimeRanges(orgID, start, end, builderQuery.StepInterval, cacheKey)
-zap.L().Info("cache misses for metrics query", zap.Any("misses", misses))
+q.logger.InfoContext(ctx, "cache misses for metrics query", "misses", misses)
missedSeries := make([]querycache.CachedSeriesData, 0)
for _, miss := range misses {
query, err := metricsV3.PrepareMetricQuery(
@@ -297,7 +296,7 @@ func (q *querier) runBuilderExpression(
}
if _, ok := cacheKeys[queryName]; !ok || params.NoCache {
-zap.L().Info("skipping cache for expression query", zap.String("queryName", queryName), zap.Int64("start", params.Start), zap.Int64("end", params.End), zap.Int64("step", params.Step), zap.Bool("noCache", params.NoCache), zap.String("cacheKey", cacheKeys[queryName]))
+q.logger.InfoContext(ctx, "skipping cache for expression query", "query_name", queryName, "start", params.Start, "end", params.End, "step", params.Step, "no_cache", params.NoCache, "cache_key", cacheKeys[queryName])
query := queries[queryName]
series, err := q.execClickHouseQuery(ctx, query)
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: series}
@@ -307,7 +306,7 @@ func (q *querier) runBuilderExpression(
cacheKey := cacheKeys[queryName]
step := postprocess.StepIntervalForFunction(params, queryName)
misses := q.queryCache.FindMissingTimeRanges(orgID, params.Start, params.End, step, cacheKey)
-zap.L().Info("cache misses for expression query", zap.Any("misses", misses))
+q.logger.InfoContext(ctx, "cache misses for expression query", "misses", misses)
missedSeries := make([]querycache.CachedSeriesData, 0)
for _, miss := range misses {
missQueries, _ := q.builder.PrepareQueries(&v3.QueryRangeParamsV3{


@@ -18,12 +18,13 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/valuer"
+"log/slog"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"go.uber.org/multierr"
-"go.uber.org/zap"
)
type channelResult struct {
@@ -44,6 +45,8 @@ type querier struct {
builder *queryBuilder.QueryBuilder
+logger *slog.Logger
// used for testing
// TODO(srikanthccv): remove this once we have a proper mock
testingMode bool
@@ -85,6 +88,8 @@ func NewQuerier(opts QuerierOptions) interfaces.Querier {
BuildMetricQuery: metricsV3.PrepareMetricQuery,
}),
+logger: slog.Default(),
testingMode: opts.TestingMode,
returnedSeries: opts.ReturnedSeries,
returnedErr: opts.ReturnedErr,
@@ -113,7 +118,7 @@ func (q *querier) execClickHouseQuery(ctx context.Context, query string) ([]*v3.
series.Points = points
}
if pointsWithNegativeTimestamps > 0 {
-zap.L().Error("found points with negative timestamps for query", zap.String("query", query))
+q.logger.ErrorContext(ctx, "found points with negative timestamps for query", "query", query)
return result, err
}
@@ -206,14 +211,14 @@ func (q *querier) runPromQueries(ctx context.Context, orgID valuer.UUID, params
cacheKey, ok := cacheKeys[queryName]
if !ok || params.NoCache {
-zap.L().Info("skipping cache for metrics prom query", zap.String("queryName", queryName), zap.Int64("start", params.Start), zap.Int64("end", params.End), zap.Int64("step", params.Step), zap.Bool("noCache", params.NoCache), zap.String("cacheKey", cacheKeys[queryName]))
+q.logger.InfoContext(ctx, "skipping cache for metrics prom query", "query_name", queryName, "start", params.Start, "end", params.End, "step", params.Step, "no_cache", params.NoCache, "cache_key", cacheKeys[queryName])
query := metricsV3.BuildPromQuery(promQuery, params.Step, params.Start, params.End)
series, err := q.execPromQuery(ctx, query)
channelResults <- channelResult{Err: err, Name: queryName, Query: query.Query, Series: series}
return
}
misses := q.queryCache.FindMissingTimeRanges(orgID, params.Start, params.End, params.Step, cacheKey)
-zap.L().Info("cache misses for metrics prom query", zap.Any("misses", misses))
+q.logger.InfoContext(ctx, "cache misses for metrics prom query", "misses", misses)
missedSeries := make([]querycache.CachedSeriesData, 0)
for _, miss := range misses {
query := metricsV3.BuildPromQuery(promQuery, params.Step, miss.Start, miss.End)
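
In the querier hunks above, the struct gains a logger field defaulted to slog.Default() and cache decisions are logged with InfoContext and snake_case keys. A compact sketch of that shape, assuming the caller does not supply a logger; the type and method names are illustrative only:

```go
package main

import (
	"context"
	"log/slog"
)

// querierSketch mirrors the shape of the change: a logger field initialised to
// slog.Default() and context-aware logging of cache decisions.
type querierSketch struct {
	logger *slog.Logger
}

func newQuerierSketch() *querierSketch {
	return &querierSketch{logger: slog.Default()}
}

func (q *querierSketch) runQuery(ctx context.Context, queryName string, noCache bool) {
	if noCache {
		q.logger.InfoContext(ctx, "skipping cache for query", "query_name", queryName, "no_cache", noCache)
		return
	}
	q.logger.InfoContext(ctx, "cache misses for query", "query_name", queryName, "misses", 0)
}

func main() {
	newQuerierSketch().runQuery(context.Background(), "A", true)
}
```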


@@ -5,6 +5,7 @@ import (
"fmt"
"math"
"strings"
+"log/slog"
"testing"
"time"
@@ -1403,6 +1404,7 @@ func Test_querier_Traces_runWindowBasedListQueryDesc(t *testing.T) {
// Create reader and querier
reader := clickhouseReader.NewReader(
+slog.Default(),
nil,
telemetryStore,
prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore),
@@ -1628,6 +1630,7 @@ func Test_querier_Traces_runWindowBasedListQueryAsc(t *testing.T) {
// Create reader and querier
reader := clickhouseReader.NewReader(
+slog.Default(),
nil,
telemetryStore,
prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore),
@@ -1928,6 +1931,7 @@ func Test_querier_Logs_runWindowBasedListQueryDesc(t *testing.T) {
// Create reader and querier
reader := clickhouseReader.NewReader(
+slog.Default(),
nil,
telemetryStore,
prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore),
@@ -2155,6 +2159,7 @@ func Test_querier_Logs_runWindowBasedListQueryAsc(t *testing.T) {
// Create reader and querier
reader := clickhouseReader.NewReader(
+slog.Default(),
nil,
telemetryStore,
prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore),
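
The test hunks above pass slog.Default() as the new first argument of clickhouseReader.NewReader. A small sketch of that constructor-injection pattern in a test; newReader is a hypothetical stand-in, and the io.Discard handler is only a suggestion for keeping test output quiet, not something the repository does:

```go
package readersketch

import (
	"io"
	"log/slog"
	"testing"
)

// reader stands in for a component whose constructor now takes a logger first.
type reader struct {
	logger *slog.Logger
}

func newReader(logger *slog.Logger) *reader {
	return &reader{logger: logger}
}

func TestReaderLogging(t *testing.T) {
	// Simplest option, as in the diff: reuse the process-wide default logger.
	_ = newReader(slog.Default())

	// Quieter alternative: send log output to io.Discard.
	quiet := slog.New(slog.NewTextHandler(io.Discard, nil))
	_ = newReader(quiet)
}
```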


@@ -16,7 +16,6 @@ import (
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/querycache"
"github.com/SigNoz/signoz/pkg/valuer"
-"go.uber.org/zap"
)
func prepareLogsQuery(
@@ -99,7 +98,7 @@ func (q *querier) runBuilderQuery(
var query string
var err error
if _, ok := cacheKeys[queryName]; !ok || params.NoCache {
-zap.L().Info("skipping cache for logs query", zap.String("queryName", queryName), zap.Int64("start", params.Start), zap.Int64("end", params.End), zap.Int64("step", params.Step), zap.Bool("noCache", params.NoCache), zap.String("cacheKey", cacheKeys[queryName]))
+q.logger.InfoContext(ctx, "skipping cache for logs query", "query_name", queryName, "start", params.Start, "end", params.End, "step", params.Step, "no_cache", params.NoCache, "cache_key", cacheKeys[queryName])
query, err = prepareLogsQuery(ctx, start, end, builderQuery, params)
if err != nil {
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
@@ -110,7 +109,7 @@ func (q *querier) runBuilderQuery(
return
}
misses := q.queryCache.FindMissingTimeRangesV2(orgID, start, end, builderQuery.StepInterval, cacheKeys[queryName])
-zap.L().Info("cache misses for logs query", zap.Any("misses", misses))
+q.logger.InfoContext(ctx, "cache misses for logs query", "misses", misses)
missedSeries := make([]querycache.CachedSeriesData, 0)
filteredMissedSeries := make([]querycache.CachedSeriesData, 0)
for _, miss := range misses {
@@ -219,7 +218,7 @@ func (q *querier) runBuilderQuery(
// We are only caching the graph panel queries. A non-existant cache key means that the query is not cached.
// If the query is not cached, we execute the query and return the result without caching it.
if _, ok := cacheKeys[queryName]; !ok || params.NoCache {
-zap.L().Info("skipping cache for metrics query", zap.String("queryName", queryName), zap.Int64("start", params.Start), zap.Int64("end", params.End), zap.Int64("step", params.Step), zap.Bool("noCache", params.NoCache), zap.String("cacheKey", cacheKeys[queryName]))
+q.logger.InfoContext(ctx, "skipping cache for metrics query", "query_name", queryName, "start", params.Start, "end", params.End, "step", params.Step, "no_cache", params.NoCache, "cache_key", cacheKeys[queryName])
query, err := metricsV4.PrepareMetricQuery(start, end, params.CompositeQuery.QueryType, params.CompositeQuery.PanelType, builderQuery, metricsV3.Options{})
if err != nil {
ch <- channelResult{Err: err, Name: queryName, Query: query, Series: nil}
@@ -231,7 +230,7 @@ func (q *querier) runBuilderQuery(
}
misses := q.queryCache.FindMissingTimeRanges(orgID, start, end, builderQuery.StepInterval, cacheKeys[queryName])
-zap.L().Info("cache misses for metrics query", zap.Any("misses", misses))
+q.logger.InfoContext(ctx, "cache misses for metrics query", "misses", misses)
missedSeries := make([]querycache.CachedSeriesData, 0)
for _, miss := range misses {
query, err := metricsV4.PrepareMetricQuery(
@@ -286,7 +285,7 @@ func (q *querier) ValidateMetricNames(ctx context.Context, query *v3.CompositeQu
for _, query := range query.PromQueries {
expr, err := parser.ParseExpr(query.Query)
if err != nil {
-zap.L().Debug("error parsing promQL expression", zap.String("query", query.Query), zap.Error(err))
+q.logger.DebugContext(ctx, "error parsing promql expression", "query", query.Query, "error", err)
continue
}
parser.Inspect(expr, func(node parser.Node, path []parser.Node) error {
@@ -302,14 +301,14 @@ func (q *querier) ValidateMetricNames(ctx context.Context, query *v3.CompositeQu
}
metrics, err := q.reader.GetNormalizedStatus(ctx, orgID, metricNames)
if err != nil {
-zap.L().Debug("error getting corresponding normalized metrics", zap.Error(err))
+q.logger.DebugContext(ctx, "error getting corresponding normalized metrics", "error", err)
return
}
for metricName, metricPresent := range metrics {
if metricPresent {
continue
} else {
-zap.L().Warn("using normalized metric name", zap.String("metrics", metricName))
+q.logger.WarnContext(ctx, "using normalized metric name", "metrics", metricName)
continue
}
}
@@ -320,14 +319,14 @@ func (q *querier) ValidateMetricNames(ctx context.Context, query *v3.CompositeQu
}
metrics, err := q.reader.GetNormalizedStatus(ctx, orgID, metricNames)
if err != nil {
-zap.L().Debug("error getting corresponding normalized metrics", zap.Error(err))
+q.logger.DebugContext(ctx, "error getting corresponding normalized metrics", "error", err)
return
}
for metricName, metricPresent := range metrics {
if metricPresent {
continue
} else {
-zap.L().Warn("using normalized metric name", zap.String("metrics", metricName))
+q.logger.WarnContext(ctx, "using normalized metric name", "metrics", metricName)
continue
}
}


@@ -18,12 +18,13 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/valuer"
+"log/slog"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"go.uber.org/multierr"
-"go.uber.org/zap"
)
type channelResult struct {
@@ -44,6 +45,8 @@ type querier struct {
builder *queryBuilder.QueryBuilder
+logger *slog.Logger
// used for testing
// TODO(srikanthccv): remove this once we have a proper mock
testingMode bool
@@ -85,6 +88,8 @@ func NewQuerier(opts QuerierOptions) interfaces.Querier {
BuildMetricQuery: metricsV4.PrepareMetricQuery,
}),
+logger: slog.Default(),
testingMode: opts.TestingMode,
returnedSeries: opts.ReturnedSeries,
returnedErr: opts.ReturnedErr,
@@ -115,7 +120,7 @@ func (q *querier) execClickHouseQuery(ctx context.Context, query string) ([]*v3.
series.Points = points
}
if pointsWithNegativeTimestamps > 0 {
-zap.L().Error("found points with negative timestamps for query", zap.String("query", query))
+q.logger.ErrorContext(ctx, "found points with negative timestamps for query", "query", query)
return result, err
}
@@ -167,7 +172,7 @@ func (q *querier) runBuilderQueries(ctx context.Context, orgID valuer.UUID, para
wg.Wait()
close(ch)
-zap.L().Info("time taken to run builder queries", zap.Duration("multiQueryDuration", time.Since(now)), zap.Int("num_queries", len(params.CompositeQuery.BuilderQueries)))
+q.logger.InfoContext(ctx, "time taken to run builder queries", "multi_query_duration", time.Since(now), "num_queries", len(params.CompositeQuery.BuilderQueries))
results := make([]*v3.Result, 0)
errQueriesByName := make(map[string]error)
@@ -208,14 +213,14 @@ func (q *querier) runPromQueries(ctx context.Context, orgID valuer.UUID, params
cacheKey, ok := cacheKeys[queryName]
if !ok || params.NoCache {
-zap.L().Info("skipping cache for metrics prom query", zap.String("queryName", queryName), zap.Int64("start", params.Start), zap.Int64("end", params.End), zap.Int64("step", params.Step), zap.Bool("noCache", params.NoCache), zap.String("cacheKey", cacheKeys[queryName]))
+q.logger.InfoContext(ctx, "skipping cache for metrics prom query", "query_name", queryName, "start", params.Start, "end", params.End, "step", params.Step, "no_cache", params.NoCache, "cache_key", cacheKeys[queryName])
query := metricsV4.BuildPromQuery(promQuery, params.Step, params.Start, params.End)
series, err := q.execPromQuery(ctx, query)
channelResults <- channelResult{Err: err, Name: queryName, Query: query.Query, Series: series}
return
}
misses := q.queryCache.FindMissingTimeRanges(orgID, params.Start, params.End, params.Step, cacheKey)
-zap.L().Info("cache misses for metrics prom query", zap.Any("misses", misses))
+q.logger.InfoContext(ctx, "cache misses for metrics prom query", "misses", misses)
missedSeries := make([]querycache.CachedSeriesData, 0)
for _, miss := range misses {
query := metricsV4.BuildPromQuery(promQuery, params.Step, miss.Start, miss.End)


@@ -5,6 +5,7 @@ import (
"fmt"
"math"
"strings"
+"log/slog"
"testing"
"time"
@@ -1455,6 +1456,7 @@ func Test_querier_Traces_runWindowBasedListQueryDesc(t *testing.T) {
// Create reader and querier
reader := clickhouseReader.NewReader(
+slog.Default(),
nil,
telemetryStore,
prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore),
@@ -1680,6 +1682,7 @@ func Test_querier_Traces_runWindowBasedListQueryAsc(t *testing.T) {
// Create reader and querier
reader := clickhouseReader.NewReader(
+slog.Default(),
nil,
telemetryStore,
prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore),
@@ -1979,6 +1982,7 @@ func Test_querier_Logs_runWindowBasedListQueryDesc(t *testing.T) {
// Create reader and querier
reader := clickhouseReader.NewReader(
+slog.Default(),
nil,
telemetryStore,
prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore),
@@ -2206,6 +2210,7 @@ func Test_querier_Logs_runWindowBasedListQueryAsc(t *testing.T) {
// Create reader and querier
reader := clickhouseReader.NewReader(
+slog.Default(),
nil,
telemetryStore,
prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, telemetryStore),


@@ -4,12 +4,13 @@ import (
"fmt"
"strings"
+"log/slog"
"github.com/SigNoz/govaluate"
"github.com/SigNoz/signoz/pkg/cache"
metricsV3 "github.com/SigNoz/signoz/pkg/query-service/app/metrics/v3"
"github.com/SigNoz/signoz/pkg/query-service/constants"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
-"go.uber.org/zap"
)
var SupportedFunctions = []string{
@@ -238,7 +239,7 @@ func (qb *QueryBuilder) PrepareQueries(params *v3.QueryRangeParamsV3) (map[strin
}
queries[queryName] = queryString
default:
-zap.L().Error("Unknown data source", zap.String("dataSource", string(query.DataSource)))
+slog.Error("unknown data source", "data_source", string(query.DataSource))
}
}
}


@@ -44,7 +44,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/utils"
"go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
"go.opentelemetry.io/otel/propagation"
-"go.uber.org/zap"
+"log/slog"
)
// Server runs HTTP, Mux and a grpc server
@@ -87,6 +87,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
}
reader := clickhouseReader.NewReader(
+signoz.Instrumentation.Logger(),
signoz.SQLStore,
signoz.TelemetryStore,
signoz.Prometheus,
@@ -259,7 +260,7 @@ func (s *Server) initListeners() error {
return err
}
-zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.httpHostPort))
+slog.Info(fmt.Sprintf("Query server started listening on %s...", s.httpHostPort))
return nil
}
@@ -279,31 +280,31 @@ func (s *Server) Start(ctx context.Context) error {
}
go func() {
-zap.L().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.httpHostPort))
+slog.Info("Starting HTTP server", "port", httpPort, "addr", s.httpHostPort)
switch err := s.httpServer.Serve(s.httpConn); err {
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
// normal exit, nothing to do
default:
-zap.L().Error("Could not start HTTP server", zap.Error(err))
+slog.Error("Could not start HTTP server", "error", err)
}
s.unavailableChannel <- healthcheck.Unavailable
}()
go func() {
-zap.L().Info("Starting pprof server", zap.String("addr", constants.DebugHttpPort))
+slog.Info("Starting pprof server", "addr", constants.DebugHttpPort)
err = http.ListenAndServe(constants.DebugHttpPort, nil)
if err != nil {
-zap.L().Error("Could not start pprof server", zap.Error(err))
+slog.Error("Could not start pprof server", "error", err)
}
}()
go func() {
-zap.L().Info("Starting OpAmp Websocket server", zap.String("addr", constants.OpAmpWsEndpoint))
+slog.Info("Starting OpAmp Websocket server", "addr", constants.OpAmpWsEndpoint)
err := s.opampServer.Start(constants.OpAmpWsEndpoint)
if err != nil {
-zap.L().Info("opamp ws server failed to start", zap.Error(err))
+slog.Error("opamp ws server failed to start", "error", err)
s.unavailableChannel <- healthcheck.Unavailable
}
}()
@@ -348,10 +349,9 @@ func makeRulesManager(
MetadataStore: metadataStore,
Prometheus: prometheus,
Context: context.Background(),
-Logger: zap.L(),
Reader: ch,
Querier: querier,
-SLogger: providerSettings.Logger,
+Logger: providerSettings.Logger,
Cache: cache,
EvalDelay: constants.GetEvalDelay(),
OrgGetter: orgGetter,
@@ -368,7 +368,7 @@
return nil, fmt.Errorf("rule manager error: %v", err)
}
-zap.L().Info("rules manager is ready")
+slog.Info("rules manager is ready")
return manager, nil
}
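
Several call sites in the HTTP server now log through the package-level slog functions, so their output format depends on the process-wide default logger. A minimal sketch of configuring that default once at startup; the JSON handler and Info level are assumptions for illustration, not what SigNoz's instrumentation package actually sets up:

```go
package main

import (
	"log/slog"
	"os"
)

func main() {
	// Set the process-wide default once; after this, plain slog.Info / slog.Error
	// calls anywhere in the program go through this handler.
	handler := slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelInfo})
	slog.SetDefault(slog.New(handler))

	slog.Info("Starting HTTP server", "port", 8080, "addr", "0.0.0.0:8080")
	slog.Error("Could not start pprof server", "error", os.ErrClosed)
}
```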


@@ -11,8 +11,9 @@ import (
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
+"log/slog"
explorer "github.com/SigNoz/signoz/pkg/query-service/app/metricsexplorer"
-"go.uber.org/zap"
)
func (aH *APIHandler) FilterKeysSuggestion(w http.ResponseWriter, r *http.Request) {
@@ -22,13 +23,13 @@ func (aH *APIHandler) FilterKeysSuggestion(w http.ResponseWriter, r *http.Reques
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
params, apiError := explorer.ParseFilterKeySuggestions(r)
if apiError != nil {
-zap.L().Error("error parsing summary filter keys request", zap.Error(apiError.Err))
+slog.ErrorContext(ctx, "error parsing summary filter keys request", "error", apiError.Err)
RespondError(w, apiError, nil)
return
}
keys, apiError := aH.SummaryService.FilterKeys(ctx, params)
if apiError != nil {
-zap.L().Error("error getting filter keys", zap.Error(apiError.Err))
+slog.ErrorContext(ctx, "error getting filter keys", "error", apiError.Err)
RespondError(w, apiError, nil)
return
}
@@ -52,14 +53,14 @@ func (aH *APIHandler) FilterValuesSuggestion(w http.ResponseWriter, r *http.Requ
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
params, apiError := explorer.ParseFilterValueSuggestions(r)
if apiError != nil {
-zap.L().Error("error parsing summary filter values request", zap.Error(apiError.Err))
+slog.ErrorContext(ctx, "error parsing summary filter values request", "error", apiError.Err)
RespondError(w, apiError, nil)
return
}
values, apiError := aH.SummaryService.FilterValues(ctx, orgID, params)
if apiError != nil {
-zap.L().Error("error getting filter values", zap.Error(apiError.Err))
+slog.ErrorContext(ctx, "error getting filter values", "error", apiError.Err)
RespondError(w, apiError, nil)
return
}
@@ -82,7 +83,7 @@ func (aH *APIHandler) GetMetricsDetails(w http.ResponseWriter, r *http.Request)
metricName := mux.Vars(r)["metric_name"]
metricsDetail, apiError := aH.SummaryService.GetMetricsSummary(ctx, orgID, metricName)
if apiError != nil {
-zap.L().Error("error getting metrics summary error", zap.Error(apiError.Err))
+slog.ErrorContext(ctx, "error getting metrics summary", "error", apiError.Err)
RespondError(w, apiError, nil)
return
}
@@ -106,14 +107,14 @@ func (aH *APIHandler) ListMetrics(w http.ResponseWriter, r *http.Request) {
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
params, apiErr := explorer.ParseSummaryListMetricsParams(r)
if apiErr != nil {
-zap.L().Error("error parsing metric list metric summary api request", zap.Error(apiErr.Err))
+slog.ErrorContext(ctx, "error parsing metric list metric summary api request", "error", apiErr.Err)
RespondError(w, model.BadRequest(apiErr), nil)
return
}
slmr, apiErr := aH.SummaryService.ListMetricsWithSummary(ctx, orgID, params)
if apiErr != nil {
-zap.L().Error("error in getting list metrics summary", zap.Error(apiErr.Err))
+slog.ErrorContext(ctx, "error in getting list metrics summary", "error", apiErr.Err)
RespondError(w, apiErr, nil)
return
}
@@ -126,13 +127,13 @@ func (aH *APIHandler) GetTreeMap(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
params, apiError := explorer.ParseTreeMapMetricsParams(r)
if apiError != nil {
-zap.L().Error("error parsing tree map metric params", zap.Error(apiError.Err))
+slog.ErrorContext(ctx, "error parsing tree map metric params", "error", apiError.Err)
RespondError(w, apiError, nil)
return
}
result, apiError := aH.SummaryService.GetMetricsTreemap(ctx, params)
if apiError != nil {
-zap.L().Error("error getting tree map data", zap.Error(apiError.Err))
+slog.ErrorContext(ctx, "error getting tree map data", "error", apiError.Err)
RespondError(w, apiError, nil)
return
}
@@ -146,13 +147,13 @@ func (aH *APIHandler) GetRelatedMetrics(w http.ResponseWriter, r *http.Request)
ctx := r.Context()
params, apiError := explorer.ParseRelatedMetricsParams(r)
if apiError != nil {
-zap.L().Error("error parsing related metric params", zap.Error(apiError.Err))
+slog.ErrorContext(ctx, "error parsing related metric params", "error", apiError.Err)
RespondError(w, apiError, nil)
return
}
result, apiError := aH.SummaryService.GetRelatedMetrics(ctx, params)
if apiError != nil {
-zap.L().Error("error getting related metrics", zap.Error(apiError.Err))
+slog.ErrorContext(ctx, "error getting related metrics", "error", apiError.Err)
RespondError(w, apiError, nil)
return
}
@@ -166,13 +167,13 @@ func (aH *APIHandler) GetInspectMetricsData(w http.ResponseWriter, r *http.Reque
ctx := r.Context()
params, apiError := explorer.ParseInspectMetricsParams(r)
if apiError != nil {
-zap.L().Error("error parsing inspect metric params", zap.Error(apiError.Err))
+slog.ErrorContext(ctx, "error parsing inspect metric params", "error", apiError.Err)
RespondError(w, apiError, nil)
return
}
result, apiError := aH.SummaryService.GetInspectMetrics(ctx, params)
if apiError != nil {
-zap.L().Error("error getting inspect metrics data", zap.Error(apiError.Err))
+slog.ErrorContext(ctx, "error getting inspect metrics data", "error", apiError.Err)
RespondError(w, apiError, nil)
return
}
@@ -197,13 +198,13 @@ func (aH *APIHandler) UpdateMetricsMetadata(w http.ResponseWriter, r *http.Reque
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
params, apiError := explorer.ParseUpdateMetricsMetadataParams(r)
if apiError != nil {
-zap.L().Error("error parsing update metrics metadata params", zap.Error(apiError.Err))
+slog.ErrorContext(ctx, "error parsing update metrics metadata params", "error", apiError.Err)
RespondError(w, apiError, nil)
return
}
apiError = aH.SummaryService.UpdateMetricsMetadata(ctx, orgID, params)
if apiError != nil {
-zap.L().Error("error updating metrics metadata", zap.Error(apiError.Err))
+slog.ErrorContext(ctx, "error updating metrics metadata", "error", apiError.Err)
RespondError(w, apiError, nil)
return
}

View File
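Note on the file above: every hunk in this handler makes the same substitution, replacing zap.L().Error(msg, zap.Error(err)) with slog.ErrorContext(ctx, msg, "error", err) so the request context travels with the log record. The sketch below is not code from this PR; it is a minimal, hypothetical handler (handleExample and doWork are made-up names) showing the same before/after shape:

package main

import (
	"errors"
	"log/slog"
	"net/http"
)

// doWork stands in for the service calls made by the real handlers.
func doWork() error {
	return errors.New("example failure")
}

// handleExample mirrors the migrated logging pattern: pass the request
// context to slog so context-scoped attributes can be attached by the
// configured slog.Handler, and report the error as a key/value pair.
func handleExample(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()

	if err := doWork(); err != nil {
		// before: zap.L().Error("error doing work", zap.Error(err))
		slog.ErrorContext(ctx, "error doing work", "error", err)
		http.Error(w, "internal error", http.StatusInternalServerError)
		return
	}
	w.WriteHeader(http.StatusOK)
}

func main() {
	http.HandleFunc("/example", handleExample)
	_ = http.ListenAndServe(":8080", nil)
}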

@@ -2,10 +2,10 @@ package smart
import (
	"errors"
+	"log/slog"
	"strconv"
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
-	"go.uber.org/zap"
)
// SmartTraceAlgorithm is an algorithm to find the target span and build a tree of spans around it with the given levelUp and levelDown parameters and the given spanLimit
@@ -53,7 +53,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
			break
		}
		if err != nil {
-			zap.L().Error("Error during BreadthFirstSearch()", zap.Error(err))
+			slog.Error("error during breadth first search", "error", err)
			return nil, err
		}
	}
@@ -191,7 +191,7 @@ func buildSpanTrees(spansPtr *[]*SpanForTraceDetails) ([]*SpanForTraceDetails, e
		// If the parent span is not found, add current span to list of roots
		if parent == nil {
-			// zap.L().Debug("Parent Span not found parent_id: ", span.ParentID)
+			// slog.Debug("parent span not found", "parent_id", span.ParentID)
			roots = append(roots, span)
			span.ParentID = ""
			continue

View File

@@ -1,11 +1,11 @@
package v3
import (
+	"log/slog"
	"strconv"
	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
	"github.com/SigNoz/signoz/pkg/query-service/utils"
-	"go.uber.org/zap"
)
var TracesListViewDefaultSelectedColumns = []v3.AttributeKey{
@@ -68,7 +68,7 @@ func TraceIdFilterUsedWithEqual(params *v3.QueryRangeParamsV3) (bool, []string)
			val := item.Value
			val, err = utils.ValidateAndCastValue(val, item.Key.DataType)
			if err != nil {
-				zap.L().Error("invalid value for key", zap.String("key", item.Key.Key), zap.Error(err))
+				slog.Error("invalid value for key", "key", item.Key.Key, "error", err)
				return false, []string{}
			}
			if val != nil {
@@ -81,7 +81,7 @@ func TraceIdFilterUsedWithEqual(params *v3.QueryRangeParamsV3) (bool, []string)
	}
-	zap.L().Debug("traceIds", zap.Any("traceIds", traceIds))
+	slog.Debug("trace_ids", "trace_ids", traceIds)
	return traceIdFilterUsed, traceIds
}

View File

@@ -9,9 +9,10 @@ import (
	"strings"
	"time"
+	"log/slog"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/pkg/errors"
-	"go.uber.org/zap"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
@@ -937,7 +938,7 @@ func (b *BuilderQuery) SetShiftByFromFunc() {
	} else if shift, ok := function.Args[0].(string); ok {
		shiftBy, err := strconv.ParseFloat(shift, 64)
		if err != nil {
-			zap.L().Error("failed to parse time shift by", zap.String("shift", shift), zap.Error(err))
+			slog.Error("failed to parse time shift by", "shift", shift, "error", err)
		}
		timeShiftBy = int64(shiftBy)
	}
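The non-handler files above (the smart-trace algorithm, the v3 trace query helpers, and the builder-query types) have no request context in scope, so the diff uses the package-level slog.Error and slog.Debug rather than the *Context variants, and zap's typed fields (zap.String, zap.Any, zap.Error) become alternating key/value arguments. A small standalone sketch of that mapping, using made-up values rather than data from this PR:

package main

import (
	"errors"
	"log/slog"
	"os"
)

func main() {
	// A JSON handler at debug level is configured here only so the structured
	// key/value pairs are easy to see; the PR itself does not set this up.
	slog.SetDefault(slog.New(slog.NewJSONHandler(os.Stderr, &slog.HandlerOptions{
		Level: slog.LevelDebug,
	})))

	shift := "abc"
	err := errors.New("invalid syntax")
	traceIDs := []string{"a1b2", "c3d4"}

	// before: zap.L().Error("failed to parse time shift by", zap.String("shift", shift), zap.Error(err))
	slog.Error("failed to parse time shift by", "shift", shift, "error", err)

	// before: zap.L().Debug("traceIds", zap.Any("traceIds", traceIds))
	slog.Debug("trace_ids", "trace_ids", traceIDs)
}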

Some files were not shown because too many files have changed in this diff.