Mirror of https://github.com/SigNoz/signoz.git
Synced 2026-02-25 09:42:25 +00:00

Compare commits: feat/cloud ... chore/issu (20 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 7a85ee1602 | |
| | 4b0cbb787a | |
| | ff028e366b | |
| | c579614d56 | |
| | 78ba2ba356 | |
| | 7fd4762e2a | |
| | 4e4c9ce5af | |
| | 7605775a38 | |
| | cb1a2a8a13 | |
| | 1a5d37b25a | |
| | bc4273f2f8 | |
| | 77fdd28e93 | |
| | 8e08a42617 | |
| | 2c3042304a | |
| | c9da09256e | |
| | e8ed22cafb | |
| | 4658232025 | |
| | e8add5942e | |
| | ddecf05d9f | |
| | bf13b26a37 | |
```diff
@@ -41,31 +41,23 @@ services:
       interval: 30s
       timeout: 5s
       retries: 3
-  schema-migrator-sync:
-    image: signoz/signoz-schema-migrator:v0.142.0
-    container_name: schema-migrator-sync
-    command:
-      - sync
-      - --cluster-name=cluster
-      - --dsn=tcp://clickhouse:9000
-      - --replication=true
-      - --up=
+  telemetrystore-migrator:
+    image: signoz/signoz-otel-collector:v0.142.0
+    container_name: telemetrystore-migrator
+    environment:
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_DSN=tcp://clickhouse:9000
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_CLUSTER=cluster
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_REPLICATION=true
+      - SIGNOZ_OTEL_COLLECTOR_TIMEOUT=10m
+    entrypoint:
+      - /bin/sh
+    command:
+      - -c
+      - |
+        /signoz-otel-collector migrate bootstrap &&
+        /signoz-otel-collector migrate sync up &&
+        /signoz-otel-collector migrate async up
     depends_on:
       clickhouse:
         condition: service_healthy
     restart: on-failure
-  schema-migrator-async:
-    image: signoz/signoz-schema-migrator:v0.142.0
-    container_name: schema-migrator-async
-    command:
-      - async
-      - --cluster-name=cluster
-      - --dsn=tcp://clickhouse:9000
-      - --replication=true
-      - --up=
-    depends_on:
-      clickhouse:
-        condition: service_healthy
-      schema-migrator-sync:
-        condition: service_completed_successfully
-    restart: on-failure
```
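The change above folds the separate sync and async migrator jobs into one container that runs a single shell script, so step ordering is enforced by `&&` rather than by compose dependencies. A trimmed, standalone sketch of the same service (image tag, DSN, and binary path copied from the diff; everything else omitted):

```yaml
services:
  telemetrystore-migrator:
    image: signoz/signoz-otel-collector:v0.142.0
    environment:
      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_DSN=tcp://clickhouse:9000
    # entrypoint + command combine into: /bin/sh -c '<script>'
    entrypoint:
      - /bin/sh
    command:
      - -c
      - |
        /signoz-otel-collector migrate bootstrap &&
        /signoz-otel-collector migrate sync up &&
        /signoz-otel-collector migrate async up
    # A non-zero exit from any step fails the whole chain and triggers
    # a retry via the restart policy.
    restart: on-failure
```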
```diff
@@ -1,14 +1,23 @@
 services:
   signoz-otel-collector:
-    image: signoz/signoz-otel-collector:v0.129.6
+    image: signoz/signoz-otel-collector:v0.142.0
     container_name: signoz-otel-collector-dev
+    entrypoint:
+      - /bin/sh
     command:
-      - --config=/etc/otel-collector-config.yaml
+      - -c
+      - |
+        /signoz-otel-collector migrate sync check &&
+        /signoz-otel-collector --config=/etc/otel-collector-config.yaml
     volumes:
       - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
     environment:
       - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
       - LOW_CARDINAL_EXCEPTION_GROUPING=false
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_DSN=tcp://clickhouse:9000
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_CLUSTER=cluster
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_REPLICATION=true
+      - SIGNOZ_OTEL_COLLECTOR_TIMEOUT=10m
     ports:
       - "4317:4317" # OTLP gRPC receiver
       - "4318:4318" # OTLP HTTP receiver
```
.github/workflows/integrationci.yaml (vendored, 3 changes)

```diff
@@ -48,12 +48,13 @@ jobs:
           - role
           - ttl
           - alerts
           - ingestionkeys
         sqlstore-provider:
           - postgres
           - sqlite
         clickhouse-version:
           - 25.5.6
           - 25.10.5
           - 25.12.5
         schema-migrator-version:
           - v0.142.0
         postgres-version:
```
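GitHub Actions expands `strategy.matrix` as a cross-product, so every clickhouse-version entry multiplies the job count across each testname and sqlstore-provider combination. Schematically (values copied from the hunk above; any excludes the workflow defines would reduce the count):

```yaml
# 4 testnames x 2 providers x 3 clickhouse versions = 24 jobs,
# each pinned to the single schema-migrator version.
strategy:
  matrix:
    testname: [role, ttl, alerts, ingestionkeys]
    sqlstore-provider: [postgres, sqlite]
    clickhouse-version: [25.5.6, 25.10.5, 25.12.5]
    schema-migrator-version: [v0.142.0]
```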
```diff
@@ -318,4 +318,5 @@ user:
   # The password of the root user. Must meet password requirements.
   password: ""
   # The name of the organization to create or look up for the root user.
-  org_name: default
+  org:
+    name: default
```
```diff
@@ -61,7 +61,6 @@ x-db-depend: &db-depend
     - clickhouse
     - clickhouse-2
     - clickhouse-3
-    - schema-migrator
 services:
   init-clickhouse:
     !!merge <<: *common
@@ -136,12 +135,17 @@ services:
     #   - "9000:9000"
     #   - "8123:8123"
     #   - "9181:9181"
+    configs:
+      - source: clickhouse-config
+        target: /etc/clickhouse-server/config.xml
+      - source: clickhouse-users
+        target: /etc/clickhouse-server/users.xml
+      - source: clickhouse-custom-function
+        target: /etc/clickhouse-server/custom-function.xml
+      - source: clickhouse-cluster
+        target: /etc/clickhouse-server/config.d/cluster.ha.xml
     volumes:
-      - ../common/clickhouse/config.xml:/etc/clickhouse-server/config.xml
-      - ../common/clickhouse/users.xml:/etc/clickhouse-server/users.xml
-      - ../common/clickhouse/custom-function.xml:/etc/clickhouse-server/custom-function.xml
       - ../common/clickhouse/user_scripts:/var/lib/clickhouse/user_scripts/
-      - ../common/clickhouse/cluster.ha.xml:/etc/clickhouse-server/config.d/cluster.xml
       - ./clickhouse-setup/data/clickhouse/:/var/lib/clickhouse/
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   clickhouse-2:
@@ -151,12 +155,17 @@ services:
     #   - "9001:9000"
     #   - "8124:8123"
     #   - "9182:9181"
+    configs:
+      - source: clickhouse-config
+        target: /etc/clickhouse-server/config.xml
+      - source: clickhouse-users
+        target: /etc/clickhouse-server/users.xml
+      - source: clickhouse-custom-function
+        target: /etc/clickhouse-server/custom-function.xml
+      - source: clickhouse-cluster
+        target: /etc/clickhouse-server/config.d/cluster.ha.xml
     volumes:
-      - ../common/clickhouse/config.xml:/etc/clickhouse-server/config.xml
-      - ../common/clickhouse/users.xml:/etc/clickhouse-server/users.xml
-      - ../common/clickhouse/custom-function.xml:/etc/clickhouse-server/custom-function.xml
       - ../common/clickhouse/user_scripts:/var/lib/clickhouse/user_scripts/
-      - ../common/clickhouse/cluster.ha.xml:/etc/clickhouse-server/config.d/cluster.xml
       - ./clickhouse-setup/data/clickhouse-2/:/var/lib/clickhouse/
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   clickhouse-3:
@@ -166,37 +175,32 @@ services:
     #   - "9002:9000"
     #   - "8125:8123"
     #   - "9183:9181"
+    configs:
+      - source: clickhouse-config
+        target: /etc/clickhouse-server/config.xml
+      - source: clickhouse-users
+        target: /etc/clickhouse-server/users.xml
+      - source: clickhouse-custom-function
+        target: /etc/clickhouse-server/custom-function.xml
+      - source: clickhouse-cluster
+        target: /etc/clickhouse-server/config.d/cluster.ha.xml
     volumes:
-      - ../common/clickhouse/config.xml:/etc/clickhouse-server/config.xml
-      - ../common/clickhouse/users.xml:/etc/clickhouse-server/users.xml
-      - ../common/clickhouse/custom-function.xml:/etc/clickhouse-server/custom-function.xml
       - ../common/clickhouse/user_scripts:/var/lib/clickhouse/user_scripts/
-      - ../common/clickhouse/cluster.ha.xml:/etc/clickhouse-server/config.d/cluster.xml
       - ./clickhouse-setup/data/clickhouse-3/:/var/lib/clickhouse/
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.112.0
-    command:
-      - --config=/root/config/prometheus.yml
+    image: signoz/signoz:v0.112.1
     ports:
       - "8080:8080" # signoz port
       # - "6060:6060" # pprof port
     volumes:
       - ../common/signoz/prometheus.yml:/root/config/prometheus.yml
       - ../common/dashboards:/root/config/dashboards
       - ./clickhouse-setup/data/signoz/:/var/lib/signoz/
     environment:
       - SIGNOZ_ALERTMANAGER_PROVIDER=signoz
       - SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://clickhouse:9000
       - SIGNOZ_SQLSTORE_SQLITE_PATH=/var/lib/signoz/signoz.db
       - DASHBOARDS_PATH=/root/config/dashboards
       - STORAGE=clickhouse
       - GODEBUG=netdns=go
       - TELEMETRY_ENABLED=true
       - DEPLOYMENT_TYPE=docker-swarm
       - SIGNOZ_TOKENIZER_JWT_SECRET=secret
       - DOT_METRICS_ENABLED=true
     healthcheck:
       test:
         - CMD
@@ -209,40 +213,48 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.142.0
+    image: signoz/signoz-otel-collector:v0.142.1
+    entrypoint:
+      - /bin/sh
     command:
-      - --config=/etc/otel-collector-config.yaml
-      - --manager-config=/etc/manager-config.yaml
-      - --copy-path=/var/tmp/collector-config.yaml
-    volumes:
-      - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
-      - ../common/signoz/otel-collector-opamp-config.yaml:/etc/manager-config.yaml
+      - -c
+      - |
+        /signoz-otel-collector migrate sync check &&
+        /signoz-otel-collector --config=/etc/otel-collector-config.yaml --manager-config=/etc/manager-config.yaml --copy-path=/var/tmp/collector-config.yaml
+    configs:
+      - source: otel-collector-config
+        target: /etc/otel-collector-config.yaml
+      - source: otel-manager-config
+        target: /etc/manager-config.yaml
     environment:
       - OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}}
       - LOW_CARDINAL_EXCEPTION_GROUPING=false
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_DSN=tcp://clickhouse:9000
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_CLUSTER=cluster
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_REPLICATION=true
+      - SIGNOZ_OTEL_COLLECTOR_TIMEOUT=10m
     ports:
       # - "1777:1777" # pprof extension
       - "4317:4317" # OTLP gRPC receiver
       - "4318:4318" # OTLP HTTP receiver
     deploy:
       replicas: 3
     depends_on:
       - clickhouse
-      - schema-migrator
       - signoz
-  schema-migrator:
-    !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.142.0
-    deploy:
-      restart_policy:
-        condition: on-failure
-        delay: 5s
-    entrypoint: sh
-    command:
-      - -c
-      - "/signoz-schema-migrator sync --dsn=tcp://clickhouse:9000 --up= && /signoz-schema-migrator async --dsn=tcp://clickhouse:9000 --up="
-    depends_on:
-      - clickhouse
+  signoz-telemetrystore-migrator:
+    !!merge <<: *db-depend
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.142.0}
+    environment:
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_DSN=tcp://clickhouse:9000
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_CLUSTER=cluster
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_REPLICATION=true
+      - SIGNOZ_OTEL_COLLECTOR_TIMEOUT=10m
+    entrypoint:
+      - /bin/sh
+    command:
+      - -c
+      - |
+        /signoz-otel-collector migrate bootstrap &&
+        /signoz-otel-collector migrate sync up &&
+        /signoz-otel-collector migrate async up
 networks:
   signoz-net:
     name: signoz-net
@@ -261,3 +273,16 @@ volumes:
   zookeeper-2:
     name: signoz-zookeeper-2
   zookeeper-3:
     name: signoz-zookeeper-3
+configs:
+  clickhouse-config:
+    file: ../common/clickhouse/config.xml
+  clickhouse-users:
+    file: ../common/clickhouse/users.xml
+  clickhouse-custom-function:
+    file: ../common/clickhouse/custom-function.xml
+  clickhouse-cluster:
+    file: ../common/clickhouse/cluster.ha.xml
+  otel-collector-config:
+    file: ./otel-collector-config.yaml
+  otel-manager-config:
+    file: ../common/signoz/otel-collector-opamp-config.yaml
```
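Unlike bind mounts, swarm `configs` are stored in the cluster and delivered to whatever node a task lands on, which is what lets the replicated, multi-node setup above work without pre-copying files to every host. A minimal self-contained sketch of the pattern (hypothetical file and service names; the real entries are in the diff above):

```yaml
# A service references a config by source name; the swarm manager
# ships the file content to the scheduling node at the target path.
services:
  app:
    image: alpine:3
    command: ["cat", "/etc/example.conf"]
    configs:
      - source: example-config
        target: /etc/example.conf
configs:
  example-config:
    file: ./example.conf
```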
```diff
@@ -58,7 +58,6 @@ x-db-depend: &db-depend
   !!merge <<: *common
   depends_on:
     - clickhouse
-    - schema-migrator
 services:
   init-clickhouse:
     !!merge <<: *common
@@ -114,30 +113,20 @@ services:
         target: /etc/clickhouse-server/config.d/cluster.xml
     volumes:
       - clickhouse:/var/lib/clickhouse/
       - ../common/clickhouse/user_scripts:/var/lib/clickhouse/user_scripts/
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.112.0
-    command:
-      - --config=/root/config/prometheus.yml
+    image: signoz/signoz:v0.112.1
     ports:
       - "8080:8080" # signoz port
       # - "6060:6060" # pprof port
     volumes:
       - sqlite:/var/lib/signoz/
     configs:
       - source: signoz-prometheus-config
         target: /root/config/prometheus.yml
     environment:
       - SIGNOZ_ALERTMANAGER_PROVIDER=signoz
       - SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://clickhouse:9000
       - SIGNOZ_SQLSTORE_SQLITE_PATH=/var/lib/signoz/signoz.db
       - DASHBOARDS_PATH=/root/config/dashboards
       - STORAGE=clickhouse
       - GODEBUG=netdns=go
       - TELEMETRY_ENABLED=true
       - DEPLOYMENT_TYPE=docker-swarm
       - DOT_METRICS_ENABLED=true
       - SIGNOZ_TOKENIZER_JWT_SECRET=secret
     healthcheck:
       test:
         - CMD
@@ -150,11 +139,14 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.142.0
+    image: signoz/signoz-otel-collector:v0.142.1
+    entrypoint:
+      - /bin/sh
     command:
-      - --config=/etc/otel-collector-config.yaml
-      - --manager-config=/etc/manager-config.yaml
-      - --copy-path=/var/tmp/collector-config.yaml
+      - -c
+      - |
+        /signoz-otel-collector migrate sync check &&
+        /signoz-otel-collector --config=/etc/otel-collector-config.yaml --manager-config=/etc/manager-config.yaml --copy-path=/var/tmp/collector-config.yaml
     configs:
       - source: otel-collector-config
         target: /etc/otel-collector-config.yaml
@@ -163,29 +155,32 @@ services:
     environment:
       - OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}}
       - LOW_CARDINAL_EXCEPTION_GROUPING=false
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_DSN=tcp://clickhouse:9000
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_CLUSTER=cluster
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_REPLICATION=true
+      - SIGNOZ_OTEL_COLLECTOR_TIMEOUT=10m
     ports:
       # - "1777:1777" # pprof extension
       - "4317:4317" # OTLP gRPC receiver
       - "4318:4318" # OTLP HTTP receiver
     deploy:
       replicas: 3
     depends_on:
       - clickhouse
-      - schema-migrator
       - signoz
-  schema-migrator:
-    !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.142.0
-    deploy:
-      restart_policy:
-        condition: on-failure
-        delay: 5s
-    entrypoint: sh
-    command:
-      - -c
-      - "/signoz-schema-migrator sync --dsn=tcp://clickhouse:9000 --up= && /signoz-schema-migrator async --dsn=tcp://clickhouse:9000 --up="
-    depends_on:
-      - clickhouse
+  signoz-telemetrystore-migrator:
+    !!merge <<: *db-depend
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.142.0}
+    environment:
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_DSN=tcp://clickhouse:9000
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_CLUSTER=cluster
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_REPLICATION=true
+      - SIGNOZ_OTEL_COLLECTOR_TIMEOUT=10m
+    entrypoint:
+      - /bin/sh
+    command:
+      - -c
+      - |
+        /signoz-otel-collector migrate bootstrap &&
+        /signoz-otel-collector migrate sync up &&
+        /signoz-otel-collector migrate async up
 networks:
   signoz-net:
     name: signoz-net
@@ -205,14 +200,6 @@ configs:
     file: ../common/clickhouse/custom-function.xml
   clickhouse-cluster:
     file: ../common/clickhouse/cluster.xml
   signoz-prometheus-config:
     file: ../common/signoz/prometheus.yml
-  # If you have multiple dashboard files, you can list them individually:
-  # dashboard-foo:
-  #   file: ../common/dashboards/foo.json
-  # dashboard-bar:
-  #   file: ../common/dashboards/bar.json
-
   otel-collector-config:
     file: ./otel-collector-config.yaml
   otel-manager-config:
```
```diff
@@ -82,6 +82,12 @@ exporters:
     timeout: 45s
     sending_queue:
       enabled: false
+  metadataexporter:
+    cache:
+      provider: in_memory
+    dsn: tcp://clickhouse:9000/signoz_metadata
+    enabled: true
+    timeout: 45s
 service:
   telemetry:
     logs:
@@ -93,19 +99,19 @@ service:
     traces:
       receivers: [otlp]
       processors: [signozspanmetrics/delta, batch]
-      exporters: [clickhousetraces, signozmeter]
+      exporters: [clickhousetraces, metadataexporter, signozmeter]
     metrics:
       receivers: [otlp]
      processors: [batch]
-      exporters: [signozclickhousemetrics, signozmeter]
+      exporters: [signozclickhousemetrics, metadataexporter, signozmeter]
     metrics/prometheus:
       receivers: [prometheus]
       processors: [batch]
-      exporters: [signozclickhousemetrics, signozmeter]
+      exporters: [signozclickhousemetrics, metadataexporter, signozmeter]
     logs:
       receivers: [otlp]
       processors: [batch]
-      exporters: [clickhouselogsexporter, signozmeter]
+      exporters: [clickhouselogsexporter, metadataexporter, signozmeter]
     metrics/meter:
       receivers: [signozmeter]
       processors: [batch/meter]
```
|
||||
depends_on:
|
||||
clickhouse:
|
||||
condition: service_healthy
|
||||
schema-migrator-sync:
|
||||
condition: service_completed_successfully
|
||||
clickhouse-2:
|
||||
condition: service_healthy
|
||||
clickhouse-3:
|
||||
condition: service_healthy
|
||||
services:
|
||||
init-clickhouse:
|
||||
!!merge <<: *common
|
||||
@@ -179,27 +181,17 @@ services:
|
||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||
signoz:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz:${VERSION:-v0.112.0}
|
||||
image: signoz/signoz:${VERSION:-v0.112.1}
|
||||
container_name: signoz
|
||||
command:
|
||||
- --config=/root/config/prometheus.yml
|
||||
ports:
|
||||
- "8080:8080" # signoz port
|
||||
# - "6060:6060" # pprof port
|
||||
volumes:
|
||||
- ../common/signoz/prometheus.yml:/root/config/prometheus.yml
|
||||
- ../common/dashboards:/root/config/dashboards
|
||||
- sqlite:/var/lib/signoz/
|
||||
environment:
|
||||
- SIGNOZ_ALERTMANAGER_PROVIDER=signoz
|
||||
- SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://clickhouse:9000
|
||||
- SIGNOZ_SQLSTORE_SQLITE_PATH=/var/lib/signoz/signoz.db
|
||||
- DASHBOARDS_PATH=/root/config/dashboards
|
||||
- STORAGE=clickhouse
|
||||
- GODEBUG=netdns=go
|
||||
- TELEMETRY_ENABLED=true
|
||||
- DEPLOYMENT_TYPE=docker-standalone-amd
|
||||
- DOT_METRICS_ENABLED=true
|
||||
- SIGNOZ_TOKENIZER_JWT_SECRET=secret
|
||||
healthcheck:
|
||||
test:
|
||||
- CMD
|
||||
@@ -210,51 +202,48 @@ services:
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
|
||||
otel-collector:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.142.0}
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.142.1}
|
||||
container_name: signoz-otel-collector
|
||||
entrypoint:
|
||||
- /bin/sh
|
||||
command:
|
||||
- --config=/etc/otel-collector-config.yaml
|
||||
- --manager-config=/etc/manager-config.yaml
|
||||
- --copy-path=/var/tmp/collector-config.yaml
|
||||
- -c
|
||||
- |
|
||||
/signoz-otel-collector migrate sync check &&
|
||||
/signoz-otel-collector --config=/etc/otel-collector-config.yaml --manager-config=/etc/manager-config.yaml --copy-path=/var/tmp/collector-config.yaml
|
||||
volumes:
|
||||
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
|
||||
- ../common/signoz/otel-collector-opamp-config.yaml:/etc/manager-config.yaml
|
||||
environment:
|
||||
- OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
|
||||
- LOW_CARDINAL_EXCEPTION_GROUPING=false
|
||||
- SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_DSN=tcp://clickhouse:9000
|
||||
- SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_CLUSTER=cluster
|
||||
- SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_REPLICATION=true
|
||||
- SIGNOZ_OTEL_COLLECTOR_TIMEOUT=10m
|
||||
ports:
|
||||
# - "1777:1777" # pprof extension
|
||||
- "4317:4317" # OTLP gRPC receiver
|
||||
- "4318:4318" # OTLP HTTP receiver
|
||||
depends_on:
|
||||
clickhouse:
|
||||
condition: service_healthy
|
||||
schema-migrator-sync:
|
||||
condition: service_completed_successfully
|
||||
signoz:
|
||||
condition: service_healthy
|
||||
schema-migrator-sync:
|
||||
!!merge <<: *common
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.142.0}
|
||||
container_name: schema-migrator-sync
|
||||
command:
|
||||
- sync
|
||||
- --dsn=tcp://clickhouse:9000
|
||||
- --up=
|
||||
depends_on:
|
||||
clickhouse:
|
||||
condition: service_healthy
|
||||
schema-migrator-async:
|
||||
signoz-telemetrystore-migrator:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.142.0}
|
||||
container_name: schema-migrator-async
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.142.0}
|
||||
container_name: signoz-telemetrystore-migrator
|
||||
environment:
|
||||
- SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_DSN=tcp://clickhouse:9000
|
||||
- SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_CLUSTER=cluster
|
||||
- SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_REPLICATION=true
|
||||
- SIGNOZ_OTEL_COLLECTOR_TIMEOUT=10m
|
||||
entrypoint:
|
||||
- /bin/sh
|
||||
command:
|
||||
- async
|
||||
- --dsn=tcp://clickhouse:9000
|
||||
- --up=
|
||||
- -c
|
||||
- |
|
||||
/signoz-otel-collector migrate bootstrap &&
|
||||
/signoz-otel-collector migrate sync up &&
|
||||
/signoz-otel-collector migrate async up
|
||||
restart: on-failure
|
||||
networks:
|
||||
signoz-net:
|
||||
|
||||
```diff
@@ -57,8 +57,6 @@ x-db-depend: &db-depend
   depends_on:
     clickhouse:
       condition: service_healthy
-    schema-migrator-sync:
-      condition: service_completed_successfully
 services:
   init-clickhouse:
     !!merge <<: *common
@@ -111,27 +109,17 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.112.0}
+    image: signoz/signoz:${VERSION:-v0.112.1}
     container_name: signoz
-    command:
-      - --config=/root/config/prometheus.yml
     ports:
       - "8080:8080" # signoz port
       # - "6060:6060" # pprof port
     volumes:
       - ../common/signoz/prometheus.yml:/root/config/prometheus.yml
       - ../common/dashboards:/root/config/dashboards
       - sqlite:/var/lib/signoz/
     environment:
       - SIGNOZ_ALERTMANAGER_PROVIDER=signoz
       - SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://clickhouse:9000
       - SIGNOZ_SQLSTORE_SQLITE_PATH=/var/lib/signoz/signoz.db
       - DASHBOARDS_PATH=/root/config/dashboards
       - STORAGE=clickhouse
       - GODEBUG=netdns=go
       - TELEMETRY_ENABLED=true
       - DEPLOYMENT_TYPE=docker-standalone-amd
       - DOT_METRICS_ENABLED=true
       - SIGNOZ_TOKENIZER_JWT_SECRET=secret
     healthcheck:
       test:
         - CMD
@@ -144,45 +132,46 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.142.0}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.142.1}
     container_name: signoz-otel-collector
+    entrypoint:
+      - /bin/sh
     command:
-      - --config=/etc/otel-collector-config.yaml
-      - --manager-config=/etc/manager-config.yaml
-      - --copy-path=/var/tmp/collector-config.yaml
+      - -c
+      - |
+        /signoz-otel-collector migrate sync check &&
+        /signoz-otel-collector --config=/etc/otel-collector-config.yaml --manager-config=/etc/manager-config.yaml --copy-path=/var/tmp/collector-config.yaml
     volumes:
       - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
       - ../common/signoz/otel-collector-opamp-config.yaml:/etc/manager-config.yaml
     environment:
       - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
       - LOW_CARDINAL_EXCEPTION_GROUPING=false
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_DSN=tcp://clickhouse:9000
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_CLUSTER=cluster
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_REPLICATION=true
+      - SIGNOZ_OTEL_COLLECTOR_TIMEOUT=10m
     ports:
       # - "1777:1777" # pprof extension
       - "4317:4317" # OTLP gRPC receiver
       - "4318:4318" # OTLP HTTP receiver
     depends_on:
       signoz:
         condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
     image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.142.0}
     container_name: schema-migrator-sync
     command:
       - sync
       - --dsn=tcp://clickhouse:9000
       - --up=
     depends_on:
       clickhouse:
         condition: service_healthy
     restart: on-failure
-  schema-migrator-async:
+  signoz-telemetrystore-migrator:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.142.0}
-    container_name: schema-migrator-async
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.142.0}
+    container_name: signoz-telemetrystore-migrator
+    environment:
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_DSN=tcp://clickhouse:9000
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_CLUSTER=cluster
+      - SIGNOZ_OTEL_COLLECTOR_CLICKHOUSE_REPLICATION=true
+      - SIGNOZ_OTEL_COLLECTOR_TIMEOUT=10m
+    entrypoint:
+      - /bin/sh
     command:
-      - async
-      - --dsn=tcp://clickhouse:9000
-      - --up=
+      - -c
+      - |
+        /signoz-otel-collector migrate bootstrap &&
+        /signoz-otel-collector migrate sync up &&
+        /signoz-otel-collector migrate async up
     restart: on-failure
 networks:
   signoz-net:
```
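These compose files lean on the long-form `depends_on` conditions: `service_healthy` gates on a passing healthcheck, while `service_completed_successfully` gates on a one-shot container exiting 0, which is the contract the migrator containers rely on. A minimal sketch with hypothetical services:

```yaml
services:
  migrator:
    image: alpine:3
    # One-shot job: runs, prints, exits 0.
    command: ["sh", "-c", "echo migrating && exit 0"]
  app:
    image: alpine:3
    command: ["echo", "schema ready"]
    depends_on:
      migrator:
        # app starts only after migrator exits with status 0.
        condition: service_completed_successfully
```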
```diff
@@ -82,6 +82,12 @@ exporters:
     timeout: 45s
     sending_queue:
       enabled: false
+  metadataexporter:
+    cache:
+      provider: in_memory
+    dsn: tcp://clickhouse:9000/signoz_metadata
+    enabled: true
+    timeout: 45s
 service:
   telemetry:
     logs:
@@ -93,19 +99,19 @@ service:
     traces:
       receivers: [otlp]
       processors: [signozspanmetrics/delta, batch]
-      exporters: [clickhousetraces, signozmeter]
+      exporters: [clickhousetraces, metadataexporter, signozmeter]
     metrics:
       receivers: [otlp]
       processors: [batch]
-      exporters: [signozclickhousemetrics, signozmeter]
+      exporters: [signozclickhousemetrics, metadataexporter, signozmeter]
     metrics/prometheus:
       receivers: [prometheus]
       processors: [batch]
-      exporters: [signozclickhousemetrics, signozmeter]
+      exporters: [signozclickhousemetrics, metadataexporter, signozmeter]
     logs:
       receivers: [otlp]
       processors: [batch]
-      exporters: [clickhouselogsexporter, signozmeter]
+      exporters: [clickhouselogsexporter, metadataexporter, signozmeter]
     metrics/meter:
       receivers: [signozmeter]
       processors: [batch/meter]
```
```diff
@@ -80,6 +80,37 @@ components:
         updatedAt:
           format: date-time
           type: string
       required:
         - id
       type: object
+    AuthtypesGettableObjects:
+      properties:
+        resource:
+          $ref: '#/components/schemas/AuthtypesResource'
+        selectors:
+          items:
+            type: string
+          type: array
+      required:
+        - resource
+        - selectors
+      type: object
+    AuthtypesGettableResources:
+      properties:
+        relations:
+          additionalProperties:
+            items:
+              type: string
+            type: array
+          nullable: true
+          type: object
+        resources:
+          items:
+            $ref: '#/components/schemas/AuthtypesResource'
+          type: array
+      required:
+        - resources
+        - relations
+      type: object
     AuthtypesGettableToken:
       properties:
@@ -130,8 +161,6 @@ components:
         serviceAccountJson:
           type: string
       type: object
-    AuthtypesName:
-      type: object
     AuthtypesOIDCConfig:
       properties:
         claimMapping:
@@ -154,7 +183,7 @@ components:
         resource:
           $ref: '#/components/schemas/AuthtypesResource'
         selector:
-          $ref: '#/components/schemas/AuthtypesSelector'
+          type: string
       required:
         - resource
         - selector
@@ -175,6 +204,22 @@ components:
         provider:
           type: string
       type: object
+    AuthtypesPatchableObjects:
+      properties:
+        additions:
+          items:
+            $ref: '#/components/schemas/AuthtypesGettableObjects'
+          nullable: true
+          type: array
+        deletions:
+          items:
+            $ref: '#/components/schemas/AuthtypesGettableObjects'
+          nullable: true
+          type: array
+      required:
+        - additions
+        - deletions
+      type: object
     AuthtypesPostableAuthDomain:
       properties:
         config:
@@ -199,7 +244,7 @@ components:
     AuthtypesResource:
       properties:
         name:
-          $ref: '#/components/schemas/AuthtypesName'
+          type: string
         type:
           type: string
       required:
@@ -231,8 +276,6 @@ components:
         samlIdp:
           type: string
       type: object
-    AuthtypesSelector:
-      type: object
     AuthtypesSessionContext:
       properties:
         exists:
@@ -245,8 +288,6 @@ components:
       type: object
     AuthtypesTransaction:
      properties:
-        id:
-          type: string
         object:
           $ref: '#/components/schemas/AuthtypesObject'
         relation:
@@ -460,10 +501,10 @@ components:
     GatewaytypesLimitValue:
       properties:
         count:
           format: int64
+          nullable: true
           type: integer
         size:
           format: int64
+          nullable: true
           type: integer
       type: object
     GatewaytypesPagination:
@@ -1668,40 +1709,6 @@ components:
         - status
         - error
       type: object
-    RoletypesGettableResources:
-      properties:
-        relations:
-          additionalProperties:
-            items:
-              type: string
-            type: array
-          nullable: true
-          type: object
-        resources:
-          items:
-            $ref: '#/components/schemas/AuthtypesResource'
-          nullable: true
-          type: array
-      required:
-        - resources
-        - relations
-      type: object
-    RoletypesPatchableObjects:
-      properties:
-        additions:
-          items:
-            $ref: '#/components/schemas/AuthtypesObject'
-          nullable: true
-          type: array
-        deletions:
-          items:
-            $ref: '#/components/schemas/AuthtypesObject'
-          nullable: true
-          type: array
-      required:
-        - additions
-        - deletions
-      type: object
     RoletypesPatchableRole:
       properties:
         description:
@@ -1737,6 +1744,7 @@ components:
         format: date-time
         type: string
       required:
         - id
         - name
         - description
         - type
@@ -1874,6 +1882,8 @@ components:
           $ref: '#/components/schemas/TypesUser'
         userId:
           type: string
+      required:
+        - id
       type: object
     TypesGettableGlobalConfig:
       properties:
@@ -1886,6 +1896,8 @@ components:
       properties:
         id:
           type: string
+      required:
+        - id
       type: object
     TypesInvite:
       properties:
@@ -1909,6 +1921,8 @@ components:
         updatedAt:
           format: date-time
           type: string
+      required:
+        - id
       type: object
     TypesOrganization:
       properties:
@@ -1929,6 +1943,8 @@ components:
         updatedAt:
           format: date-time
           type: string
+      required:
+        - id
       type: object
     TypesPostableAPIKey:
       properties:
@@ -1992,6 +2008,8 @@ components:
           type: string
         token:
           type: string
+      required:
+        - id
       type: object
     TypesStorableAPIKey:
       properties:
@@ -2017,6 +2035,8 @@ components:
           type: string
         userId:
           type: string
+      required:
+        - id
       type: object
     TypesUser:
       properties:
@@ -2038,6 +2058,8 @@ components:
         updatedAt:
           format: date-time
           type: string
+      required:
+        - id
       type: object
     ZeustypesGettableHost:
       properties:
@@ -2170,6 +2192,35 @@ paths:
       summary: Check permissions
       tags:
         - authz
+  /api/v1/authz/resources:
+    get:
+      deprecated: false
+      description: Gets all the available resources
+      operationId: AuthzResources
+      responses:
+        "200":
+          content:
+            application/json:
+              schema:
+                properties:
+                  data:
+                    $ref: '#/components/schemas/AuthtypesGettableResources'
+                  status:
+                    type: string
+                required:
+                  - status
+                  - data
+                type: object
+          description: OK
+        "500":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Internal Server Error
+      summary: Get resources
+      tags:
+        - authz
   /api/v1/changePassword/{id}:
     post:
       deprecated: false
@@ -4342,7 +4393,7 @@ paths:
               properties:
                 data:
                   items:
-                    $ref: '#/components/schemas/AuthtypesObject'
+                    $ref: '#/components/schemas/AuthtypesGettableObjects'
                   type: array
                 status:
                   type: string
@@ -4415,7 +4466,7 @@ paths:
         content:
           application/json:
            schema:
-              $ref: '#/components/schemas/RoletypesPatchableObjects'
+              $ref: '#/components/schemas/AuthtypesPatchableObjects'
      responses:
        "204":
          content:
@@ -4473,52 +4524,6 @@ paths:
       summary: Patch objects for a role by relation
       tags:
         - role
-  /api/v1/roles/resources:
-    get:
-      deprecated: false
-      description: Gets all the available resources for role assignment
-      operationId: GetResources
-      responses:
-        "200":
-          content:
-            application/json:
-              schema:
-                properties:
-                  data:
-                    $ref: '#/components/schemas/RoletypesGettableResources'
-                  status:
-                    type: string
-                required:
-                  - status
-                  - data
-                type: object
-          description: OK
-        "401":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Unauthorized
-        "403":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Forbidden
-        "500":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Internal Server Error
-      security:
-        - api_key:
-            - ADMIN
-        - tokenizer:
-            - ADMIN
-      summary: Get resources
-      tags:
-        - role
   /api/v1/user:
     get:
       deprecated: false
@@ -5091,7 +5096,7 @@ paths:
           schema:
             $ref: '#/components/schemas/GatewaytypesPostableIngestionKey'
       responses:
-        "200":
+        "201":
           content:
             application/json:
               schema:
@@ -5104,7 +5109,7 @@ paths:
                 - status
                 - data
                type: object
-          description: OK
+          description: Created
         "401":
           content:
             application/json:
@@ -5532,6 +5537,12 @@ paths:
               schema:
                 $ref: '#/components/schemas/RenderErrorResponse'
           description: Forbidden
+        "404":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Not Found
         "500":
           content:
             application/json:
@@ -5601,6 +5612,12 @@ paths:
               schema:
                 $ref: '#/components/schemas/RenderErrorResponse'
           description: Forbidden
+        "404":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Not Found
         "500":
           content:
            application/json:
@@ -5659,6 +5676,12 @@ paths:
               schema:
                 $ref: '#/components/schemas/RenderErrorResponse'
           description: Forbidden
+        "404":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Not Found
         "500":
           content:
             application/json:
@@ -5718,6 +5741,12 @@ paths:
               schema:
                 $ref: '#/components/schemas/RenderErrorResponse'
           description: Forbidden
+        "404":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Not Found
         "500":
           content:
             application/json:
```
```diff
@@ -171,8 +171,6 @@ func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource
 	for _, register := range provider.registry {
 		typeables = append(typeables, register.MustGetTypeables()...)
 	}
-	// role module cannot self register itself!
-	typeables = append(typeables, provider.MustGetTypeables()...)

 	resources := make([]*authtypes.Resource, 0)
 	for _, typeable := range typeables {
@@ -259,7 +257,7 @@ func (provider *provider) Delete(ctx context.Context, orgID valuer.UUID, id valu
 	}

 	role := roletypes.NewRoleFromStorableRole(storableRole)
-	err = role.CanEditDelete()
+	err = role.ErrIfManaged()
 	if err != nil {
 		return err
 	}
```
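The rename from CanEditDelete to ErrIfManaged narrows the method's contract to a single question: is this a managed role? A hypothetical sketch of what such a guard typically looks like; only the method name and its call site come from the diff, the struct shape and error message are assumptions:

```go
package roletypes

import "errors"

// Role is a hypothetical minimal shape for illustration only.
type Role struct {
	Managed bool
}

// ErrIfManaged rejects edits and deletes of managed (built-in) roles,
// returning nil for user-defined roles.
func (r *Role) ErrIfManaged() error {
	if r.Managed {
		return errors.New("managed roles cannot be edited or deleted")
	}
	return nil
}
```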
```diff
@@ -308,3 +308,15 @@ export const PublicDashboardPage = Loadable(
 		/* webpackChunkName: "Public Dashboard Page" */ 'pages/PublicDashboard'
 	),
 );
+
+export const AlertTypeSelectionPage = Loadable(
+	() =>
+		import(
+			/* webpackChunkName: "Alert Type Selection Page" */ 'pages/AlertTypeSelection'
+		),
+);
+
+export const MeterExplorerPage = Loadable(
+	() =>
+		import(/* webpackChunkName: "Meter Explorer Page" */ 'pages/MeterExplorer'),
+);
```
```diff
@@ -1,12 +1,10 @@
 import { RouteProps } from 'react-router-dom';

 import ROUTES from 'constants/routes';
-import AlertTypeSelectionPage from 'pages/AlertTypeSelection';
-import MessagingQueues from 'pages/MessagingQueues';
-import MeterExplorer from 'pages/MeterExplorer';

 import {
 	AlertHistory,
 	AlertOverview,
+	AlertTypeSelectionPage,
 	AllAlertChannels,
 	AllErrors,
 	ApiMonitoring,
@@ -29,6 +27,8 @@ import {
 	LogsExplorer,
 	LogsIndexToFields,
 	LogsSaveViews,
+	MessagingQueuesMainPage,
+	MeterExplorerPage,
 	MetricsExplorer,
 	OldLogsExplorer,
 	Onboarding,
@@ -399,28 +399,28 @@ const routes: AppRoutes[] = [
 	{
 		path: ROUTES.MESSAGING_QUEUES_KAFKA,
 		exact: true,
-		component: MessagingQueues,
+		component: MessagingQueuesMainPage,
 		key: 'MESSAGING_QUEUES_KAFKA',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.MESSAGING_QUEUES_CELERY_TASK,
 		exact: true,
-		component: MessagingQueues,
+		component: MessagingQueuesMainPage,
 		key: 'MESSAGING_QUEUES_CELERY_TASK',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.MESSAGING_QUEUES_OVERVIEW,
 		exact: true,
-		component: MessagingQueues,
+		component: MessagingQueuesMainPage,
 		key: 'MESSAGING_QUEUES_OVERVIEW',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.MESSAGING_QUEUES_KAFKA_DETAIL,
 		exact: true,
-		component: MessagingQueues,
+		component: MessagingQueuesMainPage,
 		key: 'MESSAGING_QUEUES_KAFKA_DETAIL',
 		isPrivate: true,
 	},
@@ -463,21 +463,21 @@ const routes: AppRoutes[] = [
 	{
 		path: ROUTES.METER,
 		exact: true,
-		component: MeterExplorer,
+		component: MeterExplorerPage,
 		key: 'METER',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.METER_EXPLORER,
 		exact: true,
-		component: MeterExplorer,
+		component: MeterExplorerPage,
 		key: 'METER_EXPLORER',
 		isPrivate: true,
 	},
 	{
 		path: ROUTES.METER_EXPLORER_VIEWS,
 		exact: true,
-		component: MeterExplorer,
+		component: MeterExplorerPage,
 		key: 'METER_EXPLORER_VIEWS',
 		isPrivate: true,
 	},
```
```diff
@@ -5,17 +5,24 @@
  * SigNoz
  */
 import type {
+	InvalidateOptions,
 	MutationFunction,
+	QueryClient,
+	QueryFunction,
+	QueryKey,
 	UseMutationOptions,
 	UseMutationResult,
+	UseQueryOptions,
+	UseQueryResult,
 } from 'react-query';
-import { useMutation } from 'react-query';
+import { useMutation, useQuery } from 'react-query';

 import type { BodyType, ErrorType } from '../../../generatedAPIInstance';
 import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
 import type {
 	AuthtypesTransactionDTO,
 	AuthzCheck200,
+	AuthzResources200,
 	RenderErrorResponseDTO,
 } from '../sigNoz.schemas';
@@ -106,3 +113,88 @@ export const useAuthzCheck = <

 	return useMutation(mutationOptions);
 };
+/**
+ * Gets all the available resources
+ * @summary Get resources
+ */
+export const authzResources = (signal?: AbortSignal) => {
+	return GeneratedAPIInstance<AuthzResources200>({
+		url: `/api/v1/authz/resources`,
+		method: 'GET',
+		signal,
+	});
+};
+
+export const getAuthzResourcesQueryKey = () => {
+	return [`/api/v1/authz/resources`] as const;
+};
+
+export const getAuthzResourcesQueryOptions = <
+	TData = Awaited<ReturnType<typeof authzResources>>,
+	TError = ErrorType<RenderErrorResponseDTO>
+>(options?: {
+	query?: UseQueryOptions<
+		Awaited<ReturnType<typeof authzResources>>,
+		TError,
+		TData
+	>;
+}) => {
+	const { query: queryOptions } = options ?? {};
+
+	const queryKey = queryOptions?.queryKey ?? getAuthzResourcesQueryKey();
+
+	const queryFn: QueryFunction<Awaited<ReturnType<typeof authzResources>>> = ({
+		signal,
+	}) => authzResources(signal);
+
+	return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
+		Awaited<ReturnType<typeof authzResources>>,
+		TError,
+		TData
+	> & { queryKey: QueryKey };
+};
+
+export type AuthzResourcesQueryResult = NonNullable<
+	Awaited<ReturnType<typeof authzResources>>
+>;
+export type AuthzResourcesQueryError = ErrorType<RenderErrorResponseDTO>;
+
+/**
+ * @summary Get resources
+ */
+
+export function useAuthzResources<
+	TData = Awaited<ReturnType<typeof authzResources>>,
+	TError = ErrorType<RenderErrorResponseDTO>
+>(options?: {
+	query?: UseQueryOptions<
+		Awaited<ReturnType<typeof authzResources>>,
+		TError,
+		TData
+	>;
+}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
+	const queryOptions = getAuthzResourcesQueryOptions(options);
+
+	const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
+		queryKey: QueryKey;
+	};
+
+	query.queryKey = queryOptions.queryKey;
+
+	return query;
+}
+
+/**
+ * @summary Get resources
+ */
+export const invalidateAuthzResources = async (
+	queryClient: QueryClient,
+	options?: InvalidateOptions,
+): Promise<QueryClient> => {
+	await queryClient.invalidateQueries(
+		{ queryKey: getAuthzResourcesQueryKey() },
+		options,
+	);
+
+	return queryClient;
+};
```
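A hedged usage sketch for the generated hook; the import path and wrapper are illustrative, while the hook name and response shape (`data.data.resources`, each entry carrying a string `name`) come from the diffs above:

```ts
// Illustrative import path; the real path depends on the generated
// module layout.
import { useAuthzResources } from './authz';

// Returns the flat list of resource names, or [] while loading.
export function useResourceNames(): string[] {
	const { data } = useAuthzResources();
	// AuthzResources200 is { data: { resources, relations }, status }.
	return data?.data.resources.map((resource) => resource.name) ?? [];
}
```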
```diff
@@ -20,7 +20,7 @@ import { useMutation, useQuery } from 'react-query';
 import type { BodyType, ErrorType } from '../../../generatedAPIInstance';
 import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
 import type {
-	CreateIngestionKey200,
+	CreateIngestionKey201,
 	CreateIngestionKeyLimit201,
 	CreateIngestionKeyLimitPathParameters,
 	DeleteIngestionKeyLimitPathParameters,
@@ -151,7 +151,7 @@ export const createIngestionKey = (
 	gatewaytypesPostableIngestionKeyDTO: BodyType<GatewaytypesPostableIngestionKeyDTO>,
 	signal?: AbortSignal,
 ) => {
-	return GeneratedAPIInstance<CreateIngestionKey200>({
+	return GeneratedAPIInstance<CreateIngestionKey201>({
 		url: `/api/v2/gateway/ingestion_keys`,
 		method: 'POST',
 		headers: { 'Content-Type': 'application/json' },
```
```diff
@@ -20,18 +20,17 @@ import { useMutation, useQuery } from 'react-query';
 import type { BodyType, ErrorType } from '../../../generatedAPIInstance';
 import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
 import type {
+	AuthtypesPatchableObjectsDTO,
 	CreateRole201,
 	DeleteRolePathParameters,
 	GetObjects200,
 	GetObjectsPathParameters,
-	GetResources200,
 	GetRole200,
 	GetRolePathParameters,
 	ListRoles200,
 	PatchObjectsPathParameters,
 	PatchRolePathParameters,
 	RenderErrorResponseDTO,
-	RoletypesPatchableObjectsDTO,
 	RoletypesPatchableRoleDTO,
 	RoletypesPostableRoleDTO,
 } from '../sigNoz.schemas';
@@ -575,13 +574,13 @@ export const invalidateGetObjects = async (
  */
 export const patchObjects = (
 	{ id, relation }: PatchObjectsPathParameters,
-	roletypesPatchableObjectsDTO: BodyType<RoletypesPatchableObjectsDTO>,
+	authtypesPatchableObjectsDTO: BodyType<AuthtypesPatchableObjectsDTO>,
 ) => {
 	return GeneratedAPIInstance<string>({
 		url: `/api/v1/roles/${id}/relation/${relation}/objects`,
 		method: 'PATCH',
 		headers: { 'Content-Type': 'application/json' },
-		data: roletypesPatchableObjectsDTO,
+		data: authtypesPatchableObjectsDTO,
 	});
 };
@@ -594,7 +593,7 @@ export const getPatchObjectsMutationOptions = <
 	TError,
 	{
 		pathParams: PatchObjectsPathParameters;
-		data: BodyType<RoletypesPatchableObjectsDTO>;
+		data: BodyType<AuthtypesPatchableObjectsDTO>;
 	},
 	TContext
 >;
@@ -603,7 +602,7 @@ export const getPatchObjectsMutationOptions = <
 	TError,
 	{
 		pathParams: PatchObjectsPathParameters;
-		data: BodyType<RoletypesPatchableObjectsDTO>;
+		data: BodyType<AuthtypesPatchableObjectsDTO>;
 	},
 	TContext
 > => {
@@ -620,7 +619,7 @@ export const getPatchObjectsMutationOptions = <
 	Awaited<ReturnType<typeof patchObjects>>,
 	{
 		pathParams: PatchObjectsPathParameters;
-		data: BodyType<RoletypesPatchableObjectsDTO>;
+		data: BodyType<AuthtypesPatchableObjectsDTO>;
 	}
 > = (props) => {
 	const { pathParams, data } = props ?? {};
@@ -634,7 +633,7 @@ export const getPatchObjectsMutationOptions = <
 export type PatchObjectsMutationResult = NonNullable<
 	Awaited<ReturnType<typeof patchObjects>>
 >;
-export type PatchObjectsMutationBody = BodyType<RoletypesPatchableObjectsDTO>;
+export type PatchObjectsMutationBody = BodyType<AuthtypesPatchableObjectsDTO>;
 export type PatchObjectsMutationError = ErrorType<RenderErrorResponseDTO>;

 /**
@@ -649,7 +648,7 @@ export const usePatchObjects = <
 	TError,
 	{
 		pathParams: PatchObjectsPathParameters;
-		data: BodyType<RoletypesPatchableObjectsDTO>;
+		data: BodyType<AuthtypesPatchableObjectsDTO>;
 	},
 	TContext
 >;
@@ -658,7 +657,7 @@ export const usePatchObjects = <
 	TError,
 	{
 		pathParams: PatchObjectsPathParameters;
-		data: BodyType<RoletypesPatchableObjectsDTO>;
+		data: BodyType<AuthtypesPatchableObjectsDTO>;
 	},
 	TContext
 > => {
@@ -666,88 +665,3 @@ export const usePatchObjects = <

 	return useMutation(mutationOptions);
 };
-/**
- * Gets all the available resources for role assignment
- * @summary Get resources
- */
-export const getResources = (signal?: AbortSignal) => {
-	return GeneratedAPIInstance<GetResources200>({
-		url: `/api/v1/roles/resources`,
-		method: 'GET',
-		signal,
-	});
-};
-
-export const getGetResourcesQueryKey = () => {
-	return [`/api/v1/roles/resources`] as const;
-};
-
-export const getGetResourcesQueryOptions = <
-	TData = Awaited<ReturnType<typeof getResources>>,
-	TError = ErrorType<RenderErrorResponseDTO>
->(options?: {
-	query?: UseQueryOptions<
-		Awaited<ReturnType<typeof getResources>>,
-		TError,
-		TData
-	>;
-}) => {
-	const { query: queryOptions } = options ?? {};
-
-	const queryKey = queryOptions?.queryKey ?? getGetResourcesQueryKey();
-
-	const queryFn: QueryFunction<Awaited<ReturnType<typeof getResources>>> = ({
-		signal,
-	}) => getResources(signal);
-
-	return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
-		Awaited<ReturnType<typeof getResources>>,
-		TError,
-		TData
-	> & { queryKey: QueryKey };
-};
-
-export type GetResourcesQueryResult = NonNullable<
-	Awaited<ReturnType<typeof getResources>>
->;
-export type GetResourcesQueryError = ErrorType<RenderErrorResponseDTO>;
-
-/**
- * @summary Get resources
- */
-
-export function useGetResources<
-	TData = Awaited<ReturnType<typeof getResources>>,
-	TError = ErrorType<RenderErrorResponseDTO>
->(options?: {
-	query?: UseQueryOptions<
-		Awaited<ReturnType<typeof getResources>>,
-		TError,
-		TData
-	>;
-}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
-	const queryOptions = getGetResourcesQueryOptions(options);
-
-	const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
-		queryKey: QueryKey;
-	};
-
-	query.queryKey = queryOptions.queryKey;
-
-	return query;
-}
-
-/**
- * @summary Get resources
- */
-export const invalidateGetResources = async (
-	queryClient: QueryClient,
-	options?: InvalidateOptions,
-): Promise<QueryClient> => {
-	await queryClient.invalidateQueries(
-		{ queryKey: getGetResourcesQueryKey() },
-		options,
-	);
-
-	return queryClient;
-};
```
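A hedged sketch of calling the re-typed mutation; the role id, relation, and resource values are illustrative, and the payload shape follows AuthtypesPatchableObjectsDTO from the schemas diff (nullable `additions`/`deletions` arrays of `{ resource, selectors }`):

```ts
// Illustrative import path and wrapper hook.
import { usePatchObjects } from './roles';

export function useGrantDashboardRead(roleId: string): () => void {
	const { mutate } = usePatchObjects();
	return () =>
		mutate({
			pathParams: { id: roleId, relation: 'read' },
			data: {
				additions: [
					{
						// Resource name and type are plain strings after
						// this change; '*' selector is a guess.
						resource: { name: 'dashboard', type: 'dashboard' },
						selectors: ['*'],
					},
				],
				deletions: null,
			},
		});
}
```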
@@ -81,7 +81,7 @@ export interface AuthtypesGettableAuthDomainDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
id?: string;
|
||||
id: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
@@ -108,6 +108,33 @@ export interface AuthtypesGettableAuthDomainDTO {
|
||||
updatedAt?: Date;
|
||||
}
|
||||
|
||||
export interface AuthtypesGettableObjectsDTO {
|
||||
resource: AuthtypesResourceDTO;
|
||||
/**
|
||||
* @type array
|
||||
*/
|
||||
selectors: string[];
|
||||
}
|
||||
|
||||
/**
|
||||
* @nullable
|
||||
*/
|
||||
export type AuthtypesGettableResourcesDTORelations = {
|
||||
[key: string]: string[];
|
||||
} | null;
|
||||
|
||||
export interface AuthtypesGettableResourcesDTO {
|
||||
/**
|
||||
* @type object
|
||||
* @nullable true
|
||||
*/
|
||||
relations: AuthtypesGettableResourcesDTORelations;
|
||||
/**
|
||||
* @type array
|
||||
*/
|
||||
resources: AuthtypesResourceDTO[];
|
||||
}
|
||||
|
||||
export interface AuthtypesGettableTokenDTO {
|
||||
/**
|
||||
* @type string
|
||||
@@ -182,10 +209,6 @@ export interface AuthtypesGoogleConfigDTO {
|
||||
serviceAccountJson?: string;
|
||||
}
|
||||
|
||||
export interface AuthtypesNameDTO {
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
export interface AuthtypesOIDCConfigDTO {
|
||||
claimMapping?: AuthtypesAttributeMappingDTO;
|
||||
/**
|
||||
@@ -216,7 +239,10 @@ export interface AuthtypesOIDCConfigDTO {
|
||||
|
||||
export interface AuthtypesObjectDTO {
|
||||
resource: AuthtypesResourceDTO;
|
||||
selector: AuthtypesSelectorDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
selector: string;
|
||||
}
|
||||
|
||||
export interface AuthtypesOrgSessionContextDTO {
|
||||
@@ -239,6 +265,19 @@ export interface AuthtypesPasswordAuthNSupportDTO {
|
||||
provider?: string;
|
||||
}
|
||||
|
||||
export interface AuthtypesPatchableObjectsDTO {
|
||||
/**
|
||||
* @type array
|
||||
* @nullable true
|
||||
*/
|
||||
additions: AuthtypesGettableObjectsDTO[] | null;
|
||||
/**
|
||||
* @type array
|
||||
* @nullable true
|
||||
*/
|
||||
deletions: AuthtypesGettableObjectsDTO[] | null;
|
||||
}
|
||||
|
||||
export interface AuthtypesPostableAuthDomainDTO {
|
||||
config?: AuthtypesAuthDomainConfigDTO;
|
||||
/**
|
||||
@@ -270,7 +309,10 @@ export interface AuthtypesPostableRotateTokenDTO {
|
||||
}
|
||||
|
||||
export interface AuthtypesResourceDTO {
|
||||
name: AuthtypesNameDTO;
|
||||
/**
|
||||
   * @type string
   */
  name: string;
  /**
   * @type string
   */
@@ -320,10 +362,6 @@ export interface AuthtypesSamlConfigDTO {
  samlIdp?: string;
}

export interface AuthtypesSelectorDTO {
  [key: string]: unknown;
}

export interface AuthtypesSessionContextDTO {
  /**
   * @type boolean
@@ -337,10 +375,6 @@ export interface AuthtypesSessionContextDTO {
}

export interface AuthtypesTransactionDTO {
  /**
   * @type string
   */
  id?: string;
  object: AuthtypesObjectDTO;
  /**
   * @type string
@@ -623,14 +657,14 @@ export interface GatewaytypesLimitMetricValueDTO {
export interface GatewaytypesLimitValueDTO {
  /**
   * @type integer
   * @format int64
   * @nullable true
   */
  count?: number;
  count?: number | null;
  /**
   * @type integer
   * @format int64
   * @nullable true
   */
  size?: number;
  size?: number | null;
}

export interface GatewaytypesPaginationDTO {
@@ -1992,39 +2026,6 @@ export interface RenderErrorResponseDTO {
  status: string;
}

/**
 * @nullable
 */
export type RoletypesGettableResourcesDTORelations = {
  [key: string]: string[];
} | null;

export interface RoletypesGettableResourcesDTO {
  /**
   * @type object
   * @nullable true
   */
  relations: RoletypesGettableResourcesDTORelations;
  /**
   * @type array
   * @nullable true
   */
  resources: AuthtypesResourceDTO[] | null;
}

export interface RoletypesPatchableObjectsDTO {
  /**
   * @type array
   * @nullable true
   */
  additions: AuthtypesObjectDTO[] | null;
  /**
   * @type array
   * @nullable true
   */
  deletions: AuthtypesObjectDTO[] | null;
}

export interface RoletypesPatchableRoleDTO {
  /**
   * @type string
@@ -2056,7 +2057,7 @@ export interface RoletypesRoleDTO {
  /**
   * @type string
   */
  id?: string;
  id: string;
  /**
   * @type string
   */
@@ -2197,7 +2198,7 @@ export interface TypesGettableAPIKeyDTO {
  /**
   * @type string
   */
  id?: string;
  id: string;
  /**
   * @type integer
   * @format int64
@@ -2250,7 +2251,7 @@ export interface TypesIdentifiableDTO {
  /**
   * @type string
   */
  id?: string;
  id: string;
}

export interface TypesInviteDTO {
@@ -2266,7 +2267,7 @@ export interface TypesInviteDTO {
  /**
   * @type string
   */
  id?: string;
  id: string;
  /**
   * @type string
   */
@@ -2311,7 +2312,7 @@ export interface TypesOrganizationDTO {
  /**
   * @type string
   */
  id?: string;
  id: string;
  /**
   * @type integer
   * @minimum 0
@@ -2417,7 +2418,7 @@ export interface TypesResetPasswordTokenDTO {
  /**
   * @type string
   */
  id?: string;
  id: string;
  /**
   * @type string
   */
@@ -2441,7 +2442,7 @@ export interface TypesStorableAPIKeyDTO {
  /**
   * @type string
   */
  id?: string;
  id: string;
  /**
   * @type string
   */
@@ -2490,7 +2491,7 @@ export interface TypesUserDTO {
  /**
   * @type string
   */
  id?: string;
  id: string;
  /**
   * @type boolean
   */
@@ -2606,6 +2607,14 @@ export type AuthzCheck200 = {
  status: string;
};

export type AuthzResources200 = {
  data: AuthtypesGettableResourcesDTO;
  /**
   * @type string
   */
  status: string;
};

export type ChangePasswordPathParameters = {
  id: string;
};
@@ -3017,7 +3026,7 @@ export type GetObjects200 = {
  /**
   * @type array
   */
  data: AuthtypesObjectDTO[];
  data: AuthtypesGettableObjectsDTO[];
  /**
   * @type string
   */
@@ -3028,14 +3037,6 @@ export type PatchObjectsPathParameters = {
  id: string;
  relation: string;
};
export type GetResources200 = {
  data: RoletypesGettableResourcesDTO;
  /**
   * @type string
   */
  status: string;
};

export type ListUsers200 = {
  /**
   * @type array
@@ -3137,7 +3138,7 @@ export type GetIngestionKeys200 = {
  status: string;
};

export type CreateIngestionKey200 = {
export type CreateIngestionKey201 = {
  data: GatewaytypesGettableCreatedIngestionKeyDTO;
  /**
   * @type string
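
The regenerated client types above tighten two contracts: fields annotated `@nullable true` now surface as `number | null` instead of being silently optional, and `id` becomes required on the identifiable DTOs. A minimal consumer sketch of the stricter contract; the formatLimit helper, the module path, and the sample values are illustrative, not part of the generated client:

    import type { GatewaytypesLimitValueDTO } from 'api/generated/types'; // assumed path

    // `count` and `size` can now be number, null, or absent; `??` collapses
    // the last two cases into a single "no cap" branch.
    function formatLimit(limit: GatewaytypesLimitValueDTO): string {
      const count = limit.count ?? 'unlimited';
      const size = limit.size ?? 'unlimited';
      return `count=${count}, size=${size}`;
    }

    formatLimit({ count: null, size: 1024 }); // "count=unlimited, size=1024"
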
@@ -21,7 +21,7 @@ export function getDefaultCellStyle(isDarkMode?: boolean): CSSProperties {

export const defaultTableStyle: CSSProperties = {
  minWidth: '40rem',
  maxWidth: '60rem',
  maxWidth: '90rem',
};

export const defaultListViewPanelStyle: CSSProperties = {
@@ -1,4 +1,5 @@
import { UniversalYAxisUnit } from '../types';
import { YAxisCategoryNames } from '../constants';
import { UniversalYAxisUnit, YAxisCategory } from '../types';
import {
  getUniversalNameFromMetricUnit,
  mapMetricUnitToUniversalUnit,
@@ -41,29 +42,29 @@ describe('YAxisUnitSelector utils', () => {

  describe('mergeCategories', () => {
    it('merges categories correctly', () => {
      const categories1 = [
      const categories1: YAxisCategory[] = [
        {
          name: 'Data',
          name: YAxisCategoryNames.Data,
          units: [
            { name: 'bytes', id: UniversalYAxisUnit.BYTES },
            { name: 'kilobytes', id: UniversalYAxisUnit.KILOBYTES },
          ],
        },
      ];
      const categories2 = [
      const categories2: YAxisCategory[] = [
        {
          name: 'Data',
          name: YAxisCategoryNames.Data,
          units: [{ name: 'bits', id: UniversalYAxisUnit.BITS }],
        },
        {
          name: 'Time',
          name: YAxisCategoryNames.Time,
          units: [{ name: 'seconds', id: UniversalYAxisUnit.SECONDS }],
        },
      ];
      const mergedCategories = mergeCategories(categories1, categories2);
      expect(mergedCategories).toEqual([
        {
          name: 'Data',
          name: YAxisCategoryNames.Data,
          units: [
            { name: 'bytes', id: UniversalYAxisUnit.BYTES },
            { name: 'kilobytes', id: UniversalYAxisUnit.KILOBYTES },
@@ -71,7 +72,7 @@ describe('YAxisUnitSelector utils', () => {
          ],
        },
        {
          name: 'Time',
          name: YAxisCategoryNames.Time,
          units: [{ name: 'seconds', id: UniversalYAxisUnit.SECONDS }],
        },
      ]);

@@ -1,5 +1,36 @@
import { UnitFamilyConfig, UniversalYAxisUnit, YAxisUnit } from './types';

export enum YAxisCategoryNames {
  Time = 'Time',
  Data = 'Data',
  DataRate = 'Data Rate',
  Count = 'Count',
  Operations = 'Operations',
  Percentage = 'Percentage',
  Boolean = 'Boolean',
  None = 'None',
  HashRate = 'Hash Rate',
  Miscellaneous = 'Miscellaneous',
  Acceleration = 'Acceleration',
  Angular = 'Angular',
  Area = 'Area',
  Flops = 'FLOPs',
  Concentration = 'Concentration',
  Currency = 'Currency',
  Datetime = 'Datetime',
  PowerElectrical = 'Power/Electrical',
  Flow = 'Flow',
  Force = 'Force',
  Mass = 'Mass',
  Length = 'Length',
  Pressure = 'Pressure',
  Radiation = 'Radiation',
  RotationSpeed = 'Rotation Speed',
  Temperature = 'Temperature',
  Velocity = 'Velocity',
  Volume = 'Volume',
}

// Mapping of universal y-axis units to their AWS, UCUM, and OpenMetrics equivalents (if available)
export const UniversalYAxisUnitMappings: Partial<
  Record<UniversalYAxisUnit, Set<YAxisUnit> | null>

@@ -1,10 +1,11 @@
import { Y_AXIS_UNIT_NAMES } from './constants';
import { YAxisCategoryNames } from './constants';
import { UniversalYAxisUnit, YAxisCategory } from './types';

// Base categories for the universal y-axis units
export const BASE_Y_AXIS_CATEGORIES: YAxisCategory[] = [
  {
    name: 'Time',
    name: YAxisCategoryNames.Time,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.SECONDS],
@@ -37,7 +38,7 @@ export const BASE_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Data',
    name: YAxisCategoryNames.Data,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.BYTES],
@@ -154,7 +155,7 @@ export const BASE_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Data Rate',
    name: YAxisCategoryNames.DataRate,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.BYTES_SECOND],
@@ -295,7 +296,7 @@ export const BASE_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Count',
    name: YAxisCategoryNames.Count,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.COUNT],
@@ -312,7 +313,7 @@ export const BASE_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Operations',
    name: YAxisCategoryNames.Operations,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.OPS_SECOND],
@@ -353,7 +354,7 @@ export const BASE_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Percentage',
    name: YAxisCategoryNames.Percentage,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.PERCENT],
@@ -366,7 +367,7 @@ export const BASE_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Boolean',
    name: YAxisCategoryNames.Boolean,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.TRUE_FALSE],
@@ -382,7 +383,7 @@ export const BASE_Y_AXIS_CATEGORIES: YAxisCategory[] = [

export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
  {
    name: 'Time',
    name: YAxisCategoryNames.Time,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.DURATION_MS],
@@ -419,7 +420,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Data Rate',
    name: YAxisCategoryNames.DataRate,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.DATA_RATE_PACKETS_PER_SECOND],
@@ -428,7 +429,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Boolean',
    name: YAxisCategoryNames.Boolean,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ON_OFF],
@@ -437,7 +438,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'None',
    name: YAxisCategoryNames.None,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.NONE],
@@ -446,7 +447,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Hash Rate',
    name: YAxisCategoryNames.HashRate,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.HASH_RATE_HASHES_PER_SECOND],
@@ -479,7 +480,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Miscellaneous',
    name: YAxisCategoryNames.Miscellaneous,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MISC_STRING],
@@ -520,7 +521,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Acceleration',
    name: YAxisCategoryNames.Acceleration,
    units: [
      {
        name:
@@ -541,7 +542,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Angular',
    name: YAxisCategoryNames.Angular,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ANGULAR_DEGREE],
@@ -566,7 +567,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Area',
    name: YAxisCategoryNames.Area,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.AREA_SQUARE_METERS],
@@ -583,7 +584,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'FLOPs',
    name: YAxisCategoryNames.Flops,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.FLOPS_FLOPS],
@@ -620,7 +621,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Concentration',
    name: YAxisCategoryNames.Concentration,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.CONCENTRATION_PPM],
@@ -677,7 +678,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Currency',
    name: YAxisCategoryNames.Currency,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.CURRENCY_USD],
@@ -774,7 +775,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Datetime',
    name: YAxisCategoryNames.Datetime,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.DATETIME_ISO],
@@ -811,7 +812,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Power/Electrical',
    name: YAxisCategoryNames.PowerElectrical,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.POWER_WATT],
@@ -968,7 +969,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Flow',
    name: YAxisCategoryNames.Flow,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.FLOW_GALLONS_PER_MINUTE],
@@ -1005,7 +1006,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Force',
    name: YAxisCategoryNames.Force,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.FORCE_NEWTON_METERS],
@@ -1026,7 +1027,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Mass',
    name: YAxisCategoryNames.Mass,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MASS_MILLIGRAM],
@@ -1051,7 +1052,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Length',
    name: YAxisCategoryNames.Length,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.LENGTH_MILLIMETER],
@@ -1080,7 +1081,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Pressure',
    name: YAxisCategoryNames.Pressure,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.PRESSURE_MILLIBAR],
@@ -1117,7 +1118,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Radiation',
    name: YAxisCategoryNames.Radiation,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.RADIATION_BECQUEREL],
@@ -1174,7 +1175,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Rotation Speed',
    name: YAxisCategoryNames.RotationSpeed,
    units: [
      {
        name:
@@ -1200,7 +1201,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Temperature',
    name: YAxisCategoryNames.Temperature,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.TEMPERATURE_CELSIUS],
@@ -1217,7 +1218,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Velocity',
    name: YAxisCategoryNames.Velocity,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.VELOCITY_METERS_PER_SECOND],
@@ -1238,7 +1239,7 @@ export const ADDITIONAL_Y_AXIS_CATEGORIES: YAxisCategory[] = [
    ],
  },
  {
    name: 'Volume',
    name: YAxisCategoryNames.Volume,
    units: [
      {
        name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.VOLUME_MILLILITER],

@@ -1,3 +1,5 @@
import { YAxisCategoryNames } from './constants';

export interface YAxisUnitSelectorProps {
  value: string | undefined;
  onChange: (value: UniversalYAxisUnit) => void;
@@ -669,7 +671,7 @@ export interface UnitFamilyConfig {
}

export interface YAxisCategory {
  name: string;
  name: YAxisCategoryNames;
  units: {
    name: string;
    id: UniversalYAxisUnit;
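
With `name` typed as `YAxisCategoryNames` rather than `string`, category lookups and merges become typo-safe at compile time. A small sketch of what the enum buys; the findCategory helper is illustrative and not part of the change:

    import { YAxisCategoryNames } from './constants';
    import { YAxisCategory } from './types';

    function findCategory(
      categories: YAxisCategory[],
      name: YAxisCategoryNames,
    ): YAxisCategory | undefined {
      return categories.find((category) => category.name === name);
    }

    // OK: enum member resolves to 'Data Rate'
    findCategory(BASE_Y_AXIS_CATEGORIES, YAxisCategoryNames.DataRate);
    // A misspelled literal such as 'Data Rtae' no longer type-checks,
    // where the old string-typed field would have returned undefined silently.
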
@@ -1,10 +1,17 @@
/* eslint-disable sonarjs/no-identical-functions */
import { MemoryRouter, useLocation } from 'react-router-dom';
import ROUTES from 'constants/routes';
import { getNonIntegrationDashboardById } from 'mocks-server/__mockdata__/dashboards';
import { server } from 'mocks-server/server';
import { rest } from 'msw';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { fireEvent, render, screen, waitFor } from 'tests/test-utils';
import {
  fireEvent,
  render,
  screen,
  userEvent,
  waitFor,
} from 'tests/test-utils';

import DashboardDescription from '..';

@@ -17,6 +24,7 @@ const DASHBOARD_TITLE_TEXT = 'thor';
const DASHBOARD_PATH = '/dashboard/4';

const mockSafeNavigate = jest.fn();
const mockNavigate = jest.fn();

jest.mock('react-router-dom', () => ({
  ...jest.requireActual('react-router-dom'),
@@ -28,6 +36,11 @@ jest.mock('react-router-dom', () => ({
  }),
}));

jest.mock('react-router-dom-v5-compat', () => ({
  ...jest.requireActual('react-router-dom-v5-compat'),
  useNavigate: (): jest.Mock => mockNavigate,
}));

jest.mock(
  'container/TopNav/DateTimeSelectionV2/index.tsx',
  () =>
@@ -45,6 +58,7 @@ jest.mock('hooks/useSafeNavigate', () => ({
describe('Dashboard landing page actions header tests', () => {
  beforeEach(() => {
    mockSafeNavigate.mockClear();
    mockNavigate.mockClear();
  });

  it('unlock dashboard should be disabled for integrations created dashboards', async () => {
@@ -124,7 +138,7 @@ describe('Dashboard landing page actions header tests', () => {
    await waitFor(() => expect(lockUnlockButton).not.toBeDisabled());
  });

  it('should navigate to dashboard list with correct params and exclude variables', async () => {
  it('should navigate back to the dashboard list when clicking the dashboard breadcrumb', async () => {
    const dashboardUrlWithVariables = `${DASHBOARD_PATH}?variables=%7B%22var1%22%3A%22value1%22%7D&otherParam=test`;
    const mockLocation = {
      pathname: DASHBOARD_PATH,
@@ -133,6 +147,17 @@ describe('Dashboard landing page actions header tests', () => {

    (useLocation as jest.Mock).mockReturnValue(mockLocation);

    // Simulate having browser history so navigate(-1) fires rather than the
    // ROUTES.ALL_DASHBOARD fallback.
    const originalDescriptor = Object.getOwnPropertyDescriptor(
      window.history,
      'length',
    );
    Object.defineProperty(window.history, 'length', {
      configurable: true,
      get: (): number => 2,
    });

    const { getByText } = render(
      <MemoryRouter initialEntries={[dashboardUrlWithVariables]}>
        <DashboardProvider>
@@ -154,27 +179,55 @@ describe('Dashboard landing page actions header tests', () => {
      ),
    );

    // Click the dashboard breadcrumb to navigate back to list
    const dashboardButton = getByText('Dashboard /');
    fireEvent.click(dashboardButton);
    await userEvent.click(dashboardButton);

    // Verify navigation was called with correct URL
    expect(mockSafeNavigate).toHaveBeenCalledWith(
      '/dashboard?columnKey=updatedAt&order=descend&page=1&search=',
    expect(mockNavigate).toHaveBeenCalledWith(-1);
    expect(mockSafeNavigate).not.toHaveBeenCalled();

    // Restore original state: if there was no own property before, delete the
    // override so window.history.length falls back to the prototype value again.
    if (originalDescriptor) {
      Object.defineProperty(window.history, 'length', originalDescriptor);
    } else {
      Reflect.deleteProperty(window.history, 'length');
    }
  });

  it('should navigate to dashboard list page when there is no browser history', async () => {
    const mockLocation = {
      pathname: DASHBOARD_PATH,
      search: '',
    };

    (useLocation as jest.Mock).mockReturnValue(mockLocation);

    // jsdom default: window.history.length === 1, so the fallback fires.
    const { getByText } = render(
      <MemoryRouter initialEntries={[DASHBOARD_PATH]}>
        <DashboardProvider>
          <DashboardDescription
            handle={{
              active: false,
              enter: (): Promise<void> => Promise.resolve(),
              exit: (): Promise<void> => Promise.resolve(),
              node: { current: null },
            }}
          />
        </DashboardProvider>
      </MemoryRouter>,
    );

    // Ensure the URL contains only essential dashboard list params
    const calledUrl = mockSafeNavigate.mock.calls[0][0] as string;
    const urlParams = new URLSearchParams(calledUrl.split('?')[1]);
    await waitFor(() =>
      expect(screen.getByTestId(DASHBOARD_TEST_ID)).toHaveTextContent(
        DASHBOARD_TITLE_TEXT,
      ),
    );

    // Should have essential dashboard list params
    expect(urlParams.get('columnKey')).toBe('updatedAt');
    expect(urlParams.get('order')).toBe('descend');
    expect(urlParams.get('page')).toBe('1');
    expect(urlParams.get('search')).toBe('');
    const dashboardButton = getByText('Dashboard /');
    await userEvent.click(dashboardButton);

    // Should NOT have variables or other dashboard-specific params
    expect(urlParams.has('variables')).toBeFalsy();
    expect(urlParams.has('relativeTime')).toBeFalsy();
    expect(mockNavigate).toHaveBeenCalledWith(ROUTES.ALL_DASHBOARD);
    expect(mockSafeNavigate).not.toHaveBeenCalled();
  });
});
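
The history-length override in the first test is worth factoring out: `window.history.length` is a read-only accessor in jsdom, so the test installs a configurable getter and restores the original descriptor afterwards. A reusable sketch of the same pattern; the withHistoryLength helper name is invented here:

    // Run `fn` while window.history.length reports `length`, then restore.
    function withHistoryLength(length: number, fn: () => void): void {
      const original = Object.getOwnPropertyDescriptor(window.history, 'length');
      Object.defineProperty(window.history, 'length', {
        configurable: true,
        get: (): number => length,
      });
      try {
        fn();
      } finally {
        if (original) {
          Object.defineProperty(window.history, 'length', original);
        } else {
          // No own property existed before the override, so delete it and
          // let the prototype accessor show through again.
          Reflect.deleteProperty(window.history, 'length');
        }
      }
    }
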
@@ -9,6 +9,7 @@ import {
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { initializeDefaultVariables } from 'providers/Dashboard/initializeDefaultVariables';
import { updateDashboardVariablesStore } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStore';
import {
  enqueueDescendantsOfVariable,
  enqueueFetchOfAllVariables,
@@ -31,6 +32,9 @@ function DashboardVariableSelection(): JSX.Element | null {
  const { updateUrlVariable, getUrlVariables } = useVariablesFromUrl();

  const { dashboardVariables } = useDashboardVariables();
  const dashboardId = useDashboardVariablesSelector(
    (state) => state.dashboardId,
  );
  const sortedVariablesArray = useDashboardVariablesSelector(
    (state) => state.sortedVariablesArray,
  );
@@ -96,6 +100,28 @@ function DashboardVariableSelection(): JSX.Element | null {
        updateUrlVariable(name || id, value);
      }

      // Synchronously update the external store with the new variable value so that
      // child variables see the updated parent value when they refetch, rather than
      // waiting for setSelectedDashboard → useEffect → updateDashboardVariablesStore.
      const updatedVariables = { ...dashboardVariables };
      if (updatedVariables[id]) {
        updatedVariables[id] = {
          ...updatedVariables[id],
          selectedValue: value,
          allSelected,
          haveCustomValuesSelected,
        };
      }
      if (updatedVariables[name]) {
        updatedVariables[name] = {
          ...updatedVariables[name],
          selectedValue: value,
          allSelected,
          haveCustomValuesSelected,
        };
      }
      updateDashboardVariablesStore({ dashboardId, variables: updatedVariables });

      setSelectedDashboard((prev) => {
        if (prev) {
          const oldVariables = { ...prev?.data.variables };
@@ -130,10 +156,12 @@ function DashboardVariableSelection(): JSX.Element | null {
        return prev;
      });

      // Cascade: enqueue query-type descendants for refetching
      // Cascade: enqueue query-type descendants for refetching.
      // Safe to call synchronously now that the store already has the updated value.
      enqueueDescendantsOfVariable(name);
    },
    [
      dashboardId,
      dashboardVariables,
      updateLocalStorageDashboardVariables,
      updateUrlVariable,
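
The ordering introduced above is the point of the hunk: the shared store must hold the new parent value before any descendant refetch is enqueued, otherwise children resolve their queries against a stale value. Distilled to its contract, with the store and queue functions as imported in the diff and a withUpdatedValue helper invented for brevity:

    function onValueUpdate(name: string, id: string, value: unknown): void {
      // 1. Synchronous store write: descendants read from this store.
      updateDashboardVariablesStore({
        dashboardId,
        variables: withUpdatedValue(dashboardVariables, id, name, value),
      });
      // 2. Only now is the cascade safe to start; each enqueued child
      //    refetches with the parent's fresh value already visible.
      enqueueDescendantsOfVariable(name);
    }
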
@@ -5,7 +5,7 @@ import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQ
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useVariableFetchState } from 'hooks/dashboard/useVariableFetchState';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import { isArray, isEmpty, isString } from 'lodash-es';
import { isArray, isEmpty } from 'lodash-es';
import { AppState } from 'store/reducers';
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
import { GlobalReducer } from 'types/reducer/globalTime';
@@ -54,7 +54,7 @@ function QueryVariableInput({
    onChange,
    onDropdownVisibleChange,
    handleClear,
    applyDefaultIfNeeded,
    getDefaultValue,
  } = useDashboardVariableSelectHelper({
    variableData,
    optionsData,
@@ -68,81 +68,93 @@ function QueryVariableInput({
      try {
        setErrorMessage(null);

        // This is just a guard for the previously undefined-typed name prop; not significant.
        // This will be changed when we change the schema
        // TODO: @AshwinBhatkal Perses
        if (!variableData.name) {
          return;
        }

        // if the response is not an array, return early
        if (
          variablesRes?.variableValues &&
          Array.isArray(variablesRes?.variableValues)
          !variablesRes?.variableValues ||
          !Array.isArray(variablesRes?.variableValues)
        ) {
          const newOptionsData = sortValues(
            variablesRes?.variableValues,
            variableData.sort,
          return;
        }

        const sortedNewOptions = sortValues(
          variablesRes.variableValues,
          variableData.sort,
        );
        const sortedOldOptions = sortValues(optionsData, variableData.sort);

        // if options are the same as before, no need to update state or check for selected value validity
        // ! selectedValue needs to be set in the first pass though, as options are initially an empty array and we need to apply default if needed
        // Expectation is that when oldOptions are not empty, there is always some selectedValue
        if (areArraysEqual(sortedNewOptions, sortedOldOptions)) {
          return;
        }

        setOptionsData(sortedNewOptions);

        let isSelectedValueMissingInNewOptions = false;

        // Check if currently selected value(s) are present in the new options list
        if (isArray(variableData.selectedValue)) {
          isSelectedValueMissingInNewOptions = variableData.selectedValue.some(
            (val) => !sortedNewOptions.includes(val),
          );
        } else if (
          variableData.selectedValue &&
          !sortedNewOptions.includes(variableData.selectedValue)
        ) {
          isSelectedValueMissingInNewOptions = true;
        }

        const oldOptionsData = sortValues(optionsData, variableData.sort) as never;
        // If multi-select with ALL option enabled, and ALL is currently selected, we want to maintain that state and select all new options
        // This block does not depend on selected value because of ALL and also because we would only come here if options are different from the previous
        if (
          variableData.multiSelect &&
          variableData.showALLOption &&
          variableData.allSelected &&
          isSelectedValueMissingInNewOptions
        ) {
          onValueUpdate(variableData.name, variableData.id, sortedNewOptions, true);

        if (!areArraysEqual(newOptionsData, oldOptionsData)) {
          let valueNotInList = false;
          // Update tempSelection to maintain ALL state when dropdown is open
          if (tempSelection !== undefined) {
            setTempSelection(sortedNewOptions.map((option) => option.toString()));
          }
          return;
        }

        if (isArray(variableData.selectedValue)) {
          variableData.selectedValue.forEach((val) => {
            if (!newOptionsData.includes(val)) {
              valueNotInList = true;
            }
          });
        } else if (
          isString(variableData.selectedValue) &&
          !newOptionsData.includes(variableData.selectedValue)
        ) {
          valueNotInList = true;
        }
        const value = variableData.selectedValue;
        let allSelected = false;

        if (variableData.name && (valueNotInList || variableData.allSelected)) {
          if (
            variableData.allSelected &&
            variableData.multiSelect &&
            variableData.showALLOption
          ) {
            if (
              variableData.name &&
              variableData.id &&
              !isEmpty(variableData.selectedValue)
            ) {
              onValueUpdate(
                variableData.name,
                variableData.id,
                newOptionsData,
                true,
              );
            }
        if (variableData.multiSelect) {
          const { selectedValue } = variableData;
          allSelected =
            sortedNewOptions.length > 0 &&
            Array.isArray(selectedValue) &&
            sortedNewOptions.every((option) => selectedValue.includes(option));
        }

            // Update tempSelection to maintain ALL state when dropdown is open
            if (tempSelection !== undefined) {
              setTempSelection(newOptionsData.map((option) => option.toString()));
            }
          } else {
            const value = variableData.selectedValue;
            let allSelected = false;

            if (variableData.multiSelect) {
              const { selectedValue } = variableData;
              allSelected =
                newOptionsData.length > 0 &&
                Array.isArray(selectedValue) &&
                newOptionsData.every((option) => selectedValue.includes(option));
            }

            if (
              variableData.name &&
              variableData.id &&
              !isEmpty(variableData.selectedValue)
            ) {
              onValueUpdate(variableData.name, variableData.id, value, allSelected);
            }
          }
        }

        setOptionsData(newOptionsData);
        // Apply default if no value is selected (e.g., new variable, first load)
        applyDefaultIfNeeded(newOptionsData);
        if (
          variableData.name &&
          variableData.id &&
          !isEmpty(variableData.selectedValue)
        ) {
          onValueUpdate(variableData.name, variableData.id, value, allSelected);
        } else {
          const defaultValue = getDefaultValue(sortedNewOptions);
          if (defaultValue !== undefined) {
            onValueUpdate(
              variableData.name,
              variableData.id,
              defaultValue,
              allSelected,
            );
          }
        }
      } catch (e) {
@@ -155,7 +167,7 @@ function QueryVariableInput({
      onValueUpdate,
      tempSelection,
      setTempSelection,
      applyDefaultIfNeeded,
      getDefaultValue,
    ],
  );

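
The reworked getOptions above is guard-first: malformed responses and unchanged option lists exit before any state is touched, and only then is the selected value revalidated. The guard order, reduced to a skeleton; free names such as variableData, optionsData, and areArraysEqual come from the component scope in the diff:

    if (!variablesRes?.variableValues || !Array.isArray(variablesRes.variableValues)) {
      return; // non-array response: nothing to do
    }
    const sortedNewOptions = sortValues(variablesRes.variableValues, variableData.sort);
    const sortedOldOptions = sortValues(optionsData, variableData.sort);
    if (areArraysEqual(sortedNewOptions, sortedOldOptions)) {
      return; // identical options: skip state updates and revalidation
    }
    setOptionsData(sortedNewOptions);
    // ...selected-value checks and onValueUpdate follow as in the diff
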
@@ -1,5 +1,6 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { act, render } from '@testing-library/react';
import * as dashboardVariablesStoreModule from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStore';
import {
  dashboardVariablesStore,
  setDashboardVariablesStore,
@@ -10,6 +11,7 @@ import {
  IDashboardVariablesStoreState,
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import {
  enqueueDescendantsOfVariable,
  enqueueFetchOfAllVariables,
  initializeVariableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
@@ -17,6 +19,17 @@ import { IDashboardVariable } from 'types/api/dashboard/getAll';

import DashboardVariableSelection from '../DashboardVariableSelection';

// Mutable container to capture the onValueUpdate callback from VariableItem
const mockVariableItemCallbacks: {
  onValueUpdate?: (
    name: string,
    id: string,
    value: IDashboardVariable['selectedValue'],
    allSelected: boolean,
    haveCustomValuesSelected?: boolean,
  ) => void;
} = {};

// Mock providers/Dashboard/Dashboard
const mockSetSelectedDashboard = jest.fn();
const mockUpdateLocalStorageDashboardVariables = jest.fn();
@@ -56,10 +69,14 @@ jest.mock('react-redux', () => ({
  useSelector: jest.fn().mockReturnValue({ minTime: 1000, maxTime: 2000 }),
}));

// Mock VariableItem to avoid rendering complexity
// VariableItem mock captures the onValueUpdate prop for use in onValueUpdate tests
jest.mock('../VariableItem', () => ({
  __esModule: true,
  default: (): JSX.Element => <div data-testid="variable-item" />,
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  default: (props: any): JSX.Element => {
    mockVariableItemCallbacks.onValueUpdate = props.onValueUpdate;
    return <div data-testid="variable-item" />;
  },
}));

function createVariable(
@@ -200,4 +217,162 @@ describe('DashboardVariableSelection', () => {
    expect(initializeVariableFetchStore).not.toHaveBeenCalled();
    expect(enqueueFetchOfAllVariables).not.toHaveBeenCalled();
  });

  describe('onValueUpdate', () => {
    let updateStoreSpy: jest.SpyInstance;

    beforeEach(() => {
      resetStore();
      jest.clearAllMocks();
      // Real implementation pass-through — we just want to observe calls
      updateStoreSpy = jest.spyOn(
        dashboardVariablesStoreModule,
        'updateDashboardVariablesStore',
      );
    });

    afterEach(() => {
      updateStoreSpy.mockRestore();
    });

    it('updates dashboardVariablesStore synchronously before enqueueDescendantsOfVariable', () => {
      setDashboardVariablesStore({
        dashboardId: 'dash-1',
        variables: {
          env: createVariable({ name: 'env', id: 'env-id', order: 0 }),
        },
      });

      render(<DashboardVariableSelection />);

      const callOrder: string[] = [];
      updateStoreSpy.mockImplementation(() => {
        callOrder.push('updateDashboardVariablesStore');
      });
      (enqueueDescendantsOfVariable as jest.Mock).mockImplementation(() => {
        callOrder.push('enqueueDescendantsOfVariable');
      });

      act(() => {
        mockVariableItemCallbacks.onValueUpdate?.(
          'env',
          'env-id',
          'production',
          false,
        );
      });

      expect(callOrder).toEqual([
        'updateDashboardVariablesStore',
        'enqueueDescendantsOfVariable',
      ]);
    });

    it('passes updated variable value to dashboardVariablesStore', () => {
      setDashboardVariablesStore({
        dashboardId: 'dash-1',
        variables: {
          env: createVariable({
            name: 'env',
            id: 'env-id',
            order: 0,
            selectedValue: 'staging',
          }),
        },
      });

      render(<DashboardVariableSelection />);

      // Clear spy calls that happened during setup/render
      updateStoreSpy.mockClear();

      act(() => {
        mockVariableItemCallbacks.onValueUpdate?.(
          'env',
          'env-id',
          'production',
          false,
        );
      });

      expect(updateStoreSpy).toHaveBeenCalledWith(
        expect.objectContaining({
          dashboardId: 'dash-1',
          variables: expect.objectContaining({
            env: expect.objectContaining({
              selectedValue: 'production',
              allSelected: false,
            }),
          }),
        }),
      );
    });

    it('calls enqueueDescendantsOfVariable synchronously without a timer', () => {
      jest.useFakeTimers();

      setDashboardVariablesStore({
        dashboardId: 'dash-1',
        variables: {
          env: createVariable({ name: 'env', id: 'env-id', order: 0 }),
        },
      });

      render(<DashboardVariableSelection />);

      act(() => {
        mockVariableItemCallbacks.onValueUpdate?.(
          'env',
          'env-id',
          'production',
          false,
        );
      });

      // Must be called immediately — no timer advancement needed
      expect(enqueueDescendantsOfVariable).toHaveBeenCalledWith('env');

      jest.useRealTimers();
    });

    it('propagates allSelected and haveCustomValuesSelected to the store', () => {
      setDashboardVariablesStore({
        dashboardId: 'dash-1',
        variables: {
          env: createVariable({
            name: 'env',
            id: 'env-id',
            order: 0,
            multiSelect: true,
            showALLOption: true,
          }),
        },
      });

      render(<DashboardVariableSelection />);
      updateStoreSpy.mockClear();

      act(() => {
        mockVariableItemCallbacks.onValueUpdate?.(
          'env',
          'env-id',
          ['production', 'staging'],
          true,
          false,
        );
      });

      expect(updateStoreSpy).toHaveBeenCalledWith(
        expect.objectContaining({
          variables: expect.objectContaining({
            env: expect.objectContaining({
              selectedValue: ['production', 'staging'],
              allSelected: true,
              haveCustomValuesSelected: false,
            }),
          }),
        }),
      );
    });
  });
});
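
These tests observe call order across modules by spying on a namespace import of the store module, so the real export is patched in place rather than replaced wholesale. The core of the pattern, trimmed down from the suite above:

    import * as storeModule from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStore';

    const callOrder: string[] = [];
    const updateStoreSpy = jest
      .spyOn(storeModule, 'updateDashboardVariablesStore')
      .mockImplementation(() => {
        callOrder.push('updateDashboardVariablesStore');
      });
    // ...trigger the update under test, then assert relative ordering:
    expect(callOrder).toEqual([
      'updateDashboardVariablesStore',
      'enqueueDescendantsOfVariable',
    ]);
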
@@ -0,0 +1,275 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { QueryClient, QueryClientProvider } from 'react-query';
import { act, render, waitFor } from '@testing-library/react';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { variableFetchStore } from 'providers/Dashboard/store/variableFetchStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

import QueryVariableInput from '../QueryVariableInput';

jest.mock('api/dashboard/variables/dashboardVariablesQuery');

jest.mock('react-redux', () => ({
  ...jest.requireActual('react-redux'),
  useSelector: jest.fn().mockReturnValue({ minTime: 1000, maxTime: 2000 }),
}));

function createTestQueryClient(): QueryClient {
  return new QueryClient({
    defaultOptions: {
      queries: { retry: false, refetchOnWindowFocus: false },
    },
  });
}

function Wrapper({
  children,
  queryClient,
}: {
  children: React.ReactNode;
  queryClient: QueryClient;
}): JSX.Element {
  return (
    <QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
  );
}

function createVariable(
  overrides: Partial<IDashboardVariable> = {},
): IDashboardVariable {
  return {
    id: 'env-id',
    name: 'env',
    description: '',
    type: 'QUERY',
    sort: 'DISABLED',
    showALLOption: false,
    multiSelect: false,
    order: 0,
    queryValue: 'SELECT env FROM table',
    ...overrides,
  };
}

/** Put the named variable into 'loading' state so useQuery fires on mount */
function setVariableLoading(name: string): void {
  variableFetchStore.update((draft) => {
    draft.states[name] = 'loading';
    draft.cycleIds[name] = (draft.cycleIds[name] || 0) + 1;
  });
}

function resetFetchStore(): void {
  variableFetchStore.set(() => ({
    states: {},
    lastUpdated: {},
    cycleIds: {},
  }));
}

describe('QueryVariableInput - getOptions logic', () => {
  const mockOnValueUpdate = jest.fn();

  beforeEach(() => {
    jest.clearAllMocks();
    resetFetchStore();
  });

  afterEach(() => {
    resetFetchStore();
  });

  it('applies default value (first option) when selectedValue is empty on first load', async () => {
    (dashboardVariablesQuery as jest.Mock).mockResolvedValue({
      statusCode: 200,
      payload: { variableValues: ['production', 'staging', 'dev'] },
    });

    const variable = createVariable({ selectedValue: undefined });
    setVariableLoading('env');

    const queryClient = createTestQueryClient();
    render(
      <Wrapper queryClient={queryClient}>
        <QueryVariableInput
          variableData={variable}
          existingVariables={{ 'env-id': variable }}
          onValueUpdate={mockOnValueUpdate}
        />
      </Wrapper>,
    );

    await waitFor(() => {
      expect(mockOnValueUpdate).toHaveBeenCalledWith(
        'env',
        'env-id',
        'production', // first option by default
        false,
      );
    });
  });

  it('keeps existing selectedValue when it is present in new options', async () => {
    (dashboardVariablesQuery as jest.Mock).mockResolvedValue({
      statusCode: 200,
      payload: { variableValues: ['production', 'staging'] },
    });

    const variable = createVariable({ selectedValue: 'staging' });
    setVariableLoading('env');

    const queryClient = createTestQueryClient();
    render(
      <Wrapper queryClient={queryClient}>
        <QueryVariableInput
          variableData={variable}
          existingVariables={{ 'env-id': variable }}
          onValueUpdate={mockOnValueUpdate}
        />
      </Wrapper>,
    );

    await waitFor(() => {
      expect(mockOnValueUpdate).toHaveBeenCalledWith(
        'env',
        'env-id',
        'staging',
        false,
      );
    });
  });

  it('selects all new options when allSelected=true and value is missing from new options', async () => {
    (dashboardVariablesQuery as jest.Mock).mockResolvedValue({
      statusCode: 200,
      payload: { variableValues: ['production', 'staging'] },
    });

    const variable = createVariable({
      selectedValue: ['old-env'],
      allSelected: true,
      multiSelect: true,
      showALLOption: true,
    });
    setVariableLoading('env');

    const queryClient = createTestQueryClient();
    render(
      <Wrapper queryClient={queryClient}>
        <QueryVariableInput
          variableData={variable}
          existingVariables={{ 'env-id': variable }}
          onValueUpdate={mockOnValueUpdate}
        />
      </Wrapper>,
    );

    await waitFor(() => {
      expect(mockOnValueUpdate).toHaveBeenCalledWith(
        'env',
        'env-id',
        ['production', 'staging'],
        true,
      );
    });
  });

  it('does not call onValueUpdate a second time when options have not changed', async () => {
    const mockQueryFn = jest.fn().mockResolvedValue({
      statusCode: 200,
      payload: { variableValues: ['production', 'staging'] },
    });
    (dashboardVariablesQuery as jest.Mock).mockImplementation(mockQueryFn);

    const variable = createVariable({ selectedValue: 'production' });
    setVariableLoading('env');

    const queryClient = createTestQueryClient();
    render(
      <Wrapper queryClient={queryClient}>
        <QueryVariableInput
          variableData={variable}
          existingVariables={{ 'env-id': variable }}
          onValueUpdate={mockOnValueUpdate}
        />
      </Wrapper>,
    );

    // Wait for first fetch and onValueUpdate call
    await waitFor(() => {
      expect(mockOnValueUpdate).toHaveBeenCalledTimes(1);
    });

    mockOnValueUpdate.mockClear();

    // Trigger a second fetch cycle with the same API response
    act(() => {
      variableFetchStore.update((draft) => {
        draft.states['env'] = 'revalidating';
        draft.cycleIds['env'] = (draft.cycleIds['env'] || 0) + 1;
      });
    });

    // Wait for second query to fire
    await waitFor(() => {
      expect(mockQueryFn).toHaveBeenCalledTimes(2);
    });

    // Options are unchanged, so onValueUpdate must not fire again
    expect(mockOnValueUpdate).not.toHaveBeenCalled();
  });

  it('does not call onValueUpdate when API returns a non-array response', async () => {
    (dashboardVariablesQuery as jest.Mock).mockResolvedValue({
      statusCode: 200,
      payload: { variableValues: null },
    });

    const variable = createVariable({ selectedValue: 'production' });
    setVariableLoading('env');

    const queryClient = createTestQueryClient();
    render(
      <Wrapper queryClient={queryClient}>
        <QueryVariableInput
          variableData={variable}
          existingVariables={{ 'env-id': variable }}
          onValueUpdate={mockOnValueUpdate}
        />
      </Wrapper>,
    );

    await waitFor(() => {
      expect(dashboardVariablesQuery).toHaveBeenCalled();
    });

    expect(mockOnValueUpdate).not.toHaveBeenCalled();
  });

  it('does not fire the query when variableData.name is empty', () => {
    (dashboardVariablesQuery as jest.Mock).mockResolvedValue({
      statusCode: 200,
      payload: { variableValues: ['production'] },
    });

    // Variable with no name — useVariableFetchState will be called with ''
    // and the query key will have an empty name, leaving it disabled
    const variable = createVariable({ name: '' });
    // Note: we do NOT put it in 'loading' state since name is empty
    // (no variableFetchStore entry for '' means isVariableFetching=false)

    const queryClient = createTestQueryClient();
    render(
      <Wrapper queryClient={queryClient}>
        <QueryVariableInput
          variableData={variable}
          existingVariables={{ 'env-id': variable }}
          onValueUpdate={mockOnValueUpdate}
        />
      </Wrapper>,
    );

    expect(dashboardVariablesQuery).not.toHaveBeenCalled();
    expect(mockOnValueUpdate).not.toHaveBeenCalled();
  });
});
@@ -46,6 +46,9 @@ interface UseDashboardVariableSelectHelperReturn {
  applyDefaultIfNeeded: (
    overrideOptions?: (string | number | boolean)[],
  ) => void;
  getDefaultValue: (
    overrideOptions?: (string | number | boolean)[],
  ) => string | string[] | undefined;
}

// eslint-disable-next-line sonarjs/cognitive-complexity
@@ -248,5 +251,6 @@ export function useDashboardVariableSelectHelper({
    defaultValue,
    onChange,
    applyDefaultIfNeeded,
    getDefaultValue,
  };
}
@@ -1,7 +1,7 @@
import { useCallback } from 'react';
import { useNavigate } from 'react-router-dom-v5-compat';
import { Button } from 'antd';
import ROUTES from 'constants/routes';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import { LayoutGrid } from 'lucide-react';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { DashboardData } from 'types/api/dashboard/getAll';
@@ -11,8 +11,8 @@ import { Base64Icons } from '../../DashboardSettings/General/utils';
import './DashboardBreadcrumbs.styles.scss';

function DashboardBreadcrumbs(): JSX.Element {
  const { safeNavigate } = useSafeNavigate();
  const { selectedDashboard, listSortOrder } = useDashboard();
  const navigate = useNavigate();
  const { selectedDashboard } = useDashboard();

  const selectedData = selectedDashboard
    ? {
@@ -24,15 +24,12 @@ function DashboardBreadcrumbs(): JSX.Element {
  const { title = '', image = Base64Icons[0] } = selectedData || {};

  const goToListPage = useCallback(() => {
    const urlParams = new URLSearchParams();
    urlParams.set('columnKey', listSortOrder.columnKey as string);
    urlParams.set('order', listSortOrder.order as string);
    urlParams.set('page', listSortOrder.pagination as string);
    urlParams.set('search', listSortOrder.search as string);

    const generatedUrl = `${ROUTES.ALL_DASHBOARD}?${urlParams.toString()}`;
    safeNavigate(generatedUrl);
  }, [listSortOrder, safeNavigate]);
    if (window.history.length > 1) {
      navigate(-1);
    } else {
      navigate(ROUTES.ALL_DASHBOARD);
    }
  }, [navigate]);

  return (
    <div className="dashboard-breadcrumbs">
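
The breadcrumb now delegates to browser history instead of rebuilding the list URL from listSortOrder: navigate(-1) preserves whatever list state the previous history entry carried, while the ROUTES.ALL_DASHBOARD fallback covers deep links and fresh tabs where window.history.length is 1. One caveat worth flagging as an assumption rather than something the diff addresses: history.length counts every entry in the tab, including pages outside the app, so the back branch can occasionally land somewhere other than the dashboard list; the tests above pin down the two intended branches rather than that edge case.
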
@@ -172,23 +172,51 @@ function ExplorerOptions({
  const { user } = useAppContext();

  const handleConditionalQueryModification = useCallback(
    // eslint-disable-next-line sonarjs/cognitive-complexity
    (defaultQuery: Query | null): string => {
      const queryToUse = defaultQuery || query;
      if (!queryToUse) {
        throw new Error('No query provided');
      }
      if (
        queryToUse?.builder?.queryData?.[0]?.aggregateOperator !==
        StringOperators.NOOP
        StringOperators.NOOP &&
        sourcepage !== DataSource.LOGS
      ) {
        return JSON.stringify(queryToUse);
      }

      // Modify aggregateOperator to count, as noop is not supported in alerts
      // Convert NOOP to COUNT for alerts and strip orderBy for logs
      const modifiedQuery = cloneDeep(queryToUse);
      if (modifiedQuery && modifiedQuery.builder?.queryData) {
        modifiedQuery.builder.queryData = modifiedQuery.builder.queryData.map(
          (item) => {
            const updatedItem = { ...item };

      modifiedQuery.builder.queryData[0].aggregateOperator = StringOperators.COUNT;
            if (updatedItem.aggregateOperator === StringOperators.NOOP) {
              updatedItem.aggregateOperator = StringOperators.COUNT;
            }

      return JSON.stringify(modifiedQuery);
            // Alerts do not support order by on logs explorer queries
            if (sourcepage === DataSource.LOGS && panelType === PANEL_TYPES.LIST) {
              updatedItem.orderBy = [];
            }

            return updatedItem;
          },
        );
      }

      try {
        return JSON.stringify(modifiedQuery);
      } catch (err) {
        throw new Error(
          'Failed to stringify modified query: ' +
            (err instanceof Error ? err.message : String(err)),
        );
      }
    },
    [query],
    [panelType, query, sourcepage],
  );

  const onCreateAlertsHandler = useCallback(
@@ -757,9 +785,9 @@ function ExplorerOptions({
    );
  }, [
    disabled,
    query,
    isOneChartPerQuery,
    onCreateAlertsHandler,
    query,
    splitedQueries,
  ]);
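
Because the hunk interleaves removed and added lines, the final shape of the mapper is easier to read reassembled. This is the added code only, put back together with names exactly as in the diff:

    // Convert NOOP to COUNT for alerts and strip orderBy for logs
    const modifiedQuery = cloneDeep(queryToUse);
    if (modifiedQuery && modifiedQuery.builder?.queryData) {
      modifiedQuery.builder.queryData = modifiedQuery.builder.queryData.map(
        (item) => {
          const updatedItem = { ...item };
          // NOOP is not a valid alert aggregation; fall back to COUNT.
          if (updatedItem.aggregateOperator === StringOperators.NOOP) {
            updatedItem.aggregateOperator = StringOperators.COUNT;
          }
          // Alerts do not support order by on logs explorer queries.
          if (sourcepage === DataSource.LOGS && panelType === PANEL_TYPES.LIST) {
            updatedItem.orderBy = [];
          }
          return updatedItem;
        },
      );
    }

Compared with the removed single-line mutation of queryData[0], the map covers every query item and no longer rewrites operators that were not NOOP.
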
@@ -1,3 +1,4 @@
import { useHistory } from 'react-router-dom';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { MOCK_QUERY } from 'container/QueryTable/Drilldown/__tests__/mockTableData';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
@@ -15,6 +16,11 @@ import { getExplorerToolBarVisibility } from '../utils';
// Mock dependencies
jest.mock('hooks/dashboard/useUpdateDashboard');

jest.mock('react-router-dom', () => ({
  ...jest.requireActual('react-router-dom'),
  useHistory: jest.fn(),
}));

jest.mock('../utils', () => ({
  getExplorerToolBarVisibility: jest.fn(),
  generateRGBAFromHex: jest.fn(() => 'rgba(0, 0, 0, 0.08)'),
@@ -29,6 +35,7 @@ const mockGetExplorerToolBarVisibility = jest.mocked(
);

const mockUseUpdateDashboard = jest.mocked(useUpdateDashboard);
const mockUseHistory = jest.mocked(useHistory);

// Mock data
const TEST_QUERY_ID = 'test-query-id';
@@ -103,7 +110,6 @@ describe('ExplorerOptionWrapper', () => {
  beforeEach(() => {
    jest.clearAllMocks();
    mockGetExplorerToolBarVisibility.mockReturnValue(true);

    // Mock useUpdateDashboard to return a mutation object
    mockUseUpdateDashboard.mockReturnValue(({
      mutate: jest.fn(),
@@ -117,6 +123,28 @@ describe('ExplorerOptionWrapper', () => {
    } as unknown) as ReturnType<typeof useUpdateDashboard>);
  });

  it('should navigate to alert creation page when "Create an Alert" is clicked in logs-explorer', async () => {
    const user = userEvent.setup({ pointerEventsCheck: 0 });
    const mockPush = jest.fn();
    mockUseHistory.mockReturnValue(({
      push: mockPush,
    } as unknown) as ReturnType<typeof useHistory>);

    renderExplorerOptionWrapper({ sourcepage: DataSource.LOGS });

    const createAlertButton = screen.getByRole('button', {
      name: 'Create an Alert',
    });
    await user.click(createAlertButton);

    expect(mockPush).toHaveBeenCalledTimes(1);
    const calledWith = mockPush.mock.calls[0][0] as string;
    const [path, search = ''] = calledWith.split('?');
    expect(path).toBe('/alerts/new');
    const params = new URLSearchParams(search);
    expect(params.has('compositeQuery')).toBe(true);
  });

  describe('onExport functionality', () => {
    it('should call onExport when New Dashboard button is clicked in export modal', async () => {
      const user = userEvent.setup({ pointerEventsCheck: 0 });
@@ -45,6 +45,7 @@ import {
} from 'container/DashboardContainer/DashboardDescription/utils';
import { Base64Icons } from 'container/DashboardContainer/DashboardSettings/General/utils';
import dayjs from 'dayjs';
import { useDashboardListSortOrder } from 'hooks/dashboard/useDashboardListSortOrder';
import { useGetAllDashboard } from 'hooks/dashboard/useGetAllDashboard';
import useComponentPermission from 'hooks/useComponentPermission';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
@@ -76,7 +77,6 @@ import {
// see more: https://github.com/lucide-icons/lucide/issues/94
import { handleContactSupport } from 'pages/Integrations/utils';
import { useAppContext } from 'providers/App/App';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { useErrorModal } from 'providers/ErrorModalProvider';
import { useTimezone } from 'providers/Timezone';
import {
@@ -104,22 +104,19 @@ function DashboardsList(): JSX.Element {
  const {
    data: dashboardListResponse,
    isLoading: isDashboardListLoading,
    isRefetching: isDashboardListRefetching,
    isFetching: isDashboardListFetching,
    error: dashboardFetchError,
    refetch: refetchDashboardList,
  } = useGetAllDashboard();

  const { user } = useAppContext();
  const { safeNavigate } = useSafeNavigate();
  const {
    listSortOrder: sortOrder,
    setListSortOrder: setSortOrder,
  } = useDashboard();
  const { listSortOrder, setListSortOrder } = useDashboardListSortOrder();

  const { isCloudUser: isCloudUserVal } = useGetTenantLicense();

  const [searchString, setSearchString] = useState<string>(
    sortOrder.search || '',
    listSortOrder.search || '',
  );
  const [action, createNewDashboard] = useComponentPermission(
    ['action', 'create_new_dashboards'],
@@ -139,7 +136,6 @@ function DashboardsList(): JSX.Element {
  ] = useState<boolean>(false);

  const [uploadedGrafana, setUploadedGrafana] = useState<boolean>(false);
  const [isFilteringDashboards, setIsFilteringDashboards] = useState(false);
  const [isConfigureMetadataOpen, setIsConfigureMetadata] = useState<boolean>(
    false,
  );
@@ -187,75 +183,33 @@ function DashboardsList(): JSX.Element {
    }
  }

  const [dashboards, setDashboards] = useState<Dashboard[]>();

  const sortDashboardsByCreatedAt = (dashboards: Dashboard[]): void => {
    const sortedDashboards = dashboards.sort(
      (a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime(),
    );
    setDashboards(sortedDashboards);
  };

  const sortDashboardsByUpdatedAt = (dashboards: Dashboard[]): void => {
    const sortedDashboards = dashboards.sort(
      (a, b) => new Date(b.updatedAt).getTime() - new Date(a.updatedAt).getTime(),
    );
    setDashboards(sortedDashboards);
  };

  const sortHandle = (key: string): void => {
    if (!dashboards) {
      return;
    }
    if (key === 'createdAt') {
      sortDashboardsByCreatedAt(dashboards);
      setSortOrder({
        columnKey: 'createdAt',
        order: 'descend',
        pagination: sortOrder.pagination || '1',
        search: sortOrder.search || '',
      });
    } else if (key === 'updatedAt') {
      sortDashboardsByUpdatedAt(dashboards);
      setSortOrder({
        columnKey: 'updatedAt',
        order: 'descend',
        pagination: sortOrder.pagination || '1',
        search: sortOrder.search || '',
      });
    }
  };

  function handlePageSizeUpdate(page: number): void {
    setSortOrder({ ...sortOrder, pagination: String(page) });
  }

  useEffect(() => {
    const filteredDashboards = filterDashboard(
  const dashboards = useMemo((): Dashboard[] => {
    const filtered = filterDashboard(
      searchString,
      dashboardListResponse?.data || [],
    );
    if (sortOrder.columnKey === 'updatedAt') {
      sortDashboardsByUpdatedAt(filteredDashboards || []);
    } else if (sortOrder.columnKey === 'createdAt') {
      sortDashboardsByCreatedAt(filteredDashboards || []);
    } else if (sortOrder.columnKey === 'null') {
      setSortOrder({
        columnKey: 'updatedAt',
        order: 'descend',
        pagination: sortOrder.pagination || '1',
        search: sortOrder.search || '',
      });
      sortDashboardsByUpdatedAt(filteredDashboards || []);
    if (listSortOrder.columnKey === 'createdAt') {
      return filtered.sort(
        (a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime(),
      );
    }
  }, [
    dashboardListResponse,
    searchString,
    setSortOrder,
    sortOrder.columnKey,
    sortOrder.pagination,
    sortOrder.search,
  ]);
    return filtered.sort(
      (a, b) => new Date(b.updatedAt).getTime() - new Date(a.updatedAt).getTime(),
    );
  }, [dashboardListResponse?.data, searchString, listSortOrder.columnKey]);
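
The useEffect plus setDashboards pair above becomes derived state: useMemo recomputes the filtered, sorted list from its inputs, so there is no second copy of the data to keep in sync or re-sort on demand. Condensed, with one stated assumption: filterDashboard returns a fresh array, which is what makes the in-place sort safe here.

    const dashboards = useMemo((): Dashboard[] => {
      const filtered = filterDashboard(searchString, dashboardListResponse?.data || []);
      const key = listSortOrder.columnKey === 'createdAt' ? 'createdAt' : 'updatedAt';
      // Newest first by the selected column; `filtered` is a fresh array,
      // so Array.prototype.sort does not mutate the query cache.
      return filtered.sort(
        (a, b) => new Date(b[key]).getTime() - new Date(a[key]).getTime(),
      );
    }, [dashboardListResponse?.data, searchString, listSortOrder.columnKey]);
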
  const sortHandle = (key: string): void => {
    setListSortOrder({
      columnKey: key,
      order: 'descend',
      pagination: listSortOrder.pagination || '1',
      search: listSortOrder.search || '',
    });
  };

  function handlePageSizeUpdate(page: number): void {
    setListSortOrder({ ...listSortOrder, pagination: String(page) });
  }

  const [newDashboardState, setNewDashboardState] = useState({
    loading: false,
@@ -265,26 +219,25 @@ function DashboardsList(): JSX.Element {

  const { showErrorModal } = useErrorModal();

  const data: Data[] =
    dashboards?.map((e) => ({
      createdAt: e.createdAt,
      description: e.data.description || '',
      id: e.id,
      lastUpdatedTime: e.updatedAt,
      name: e.data.title,
      tags: e.data.tags || [],
      key: e.id,
      createdBy: e.createdBy,
      isLocked: !!e.locked || false,
      lastUpdatedBy: e.updatedBy,
      image: e.data.image || Base64Icons[0],
      variables: e.data.variables,
      widgets: e.data.widgets,
      layout: e.data.layout,
      panelMap: e.data.panelMap,
      version: e.data.version,
      refetchDashboardList,
    })) || [];
  const data: Data[] = dashboards.map((e) => ({
    createdAt: e.createdAt,
    description: e.data.description || '',
    id: e.id,
    lastUpdatedTime: e.updatedAt,
    name: e.data.title,
    tags: e.data.tags || [],
    key: e.id,
    createdBy: e.createdBy,
    isLocked: !!e.locked || false,
    lastUpdatedBy: e.updatedBy,
    image: e.data.image || Base64Icons[0],
    variables: e.data.variables,
    widgets: e.data.widgets,
    layout: e.data.layout,
    panelMap: e.data.panelMap,
    version: e.data.version,
    refetchDashboardList,
  }));

  const onNewDashboardHandler = useCallback(async () => {
    try {
@@ -324,16 +277,9 @@ function DashboardsList(): JSX.Element {
  };

  const handleSearch = (event: ChangeEvent<HTMLInputElement>): void => {
    setIsFilteringDashboards(true);
    const searchText = (event as React.BaseSyntheticEvent)?.target?.value || '';
    const filteredDashboards = filterDashboard(
      searchText,
      dashboardListResponse?.data || [],
    );
    setDashboards(filteredDashboards);
    setIsFilteringDashboards(false);
    setSearchString(searchText);
    setSortOrder({ ...sortOrder, search: searchText });
    setListSortOrder({ ...listSortOrder, search: searchText });
  };

  const [state, setCopy] = useCopyToClipboard();
@@ -671,8 +617,8 @@ function DashboardsList(): JSX.Element {
    showTotal: showPaginationItem,
    showSizeChanger: false,
    onChange: (page: any): void => handlePageSizeUpdate(page),
    current: Number(sortOrder.pagination),
    defaultCurrent: Number(sortOrder.pagination) || 1,
    current: Number(listSortOrder.pagination),
    defaultCurrent: Number(listSortOrder.pagination) || 1,
    hideOnSinglePage: true,
  };

@@ -710,9 +656,7 @@ function DashboardsList(): JSX.Element {
        )}
      </div>

      {isDashboardListLoading ||
      isFilteringDashboards ||
      isDashboardListRefetching ? (
      {isDashboardListFetching ? (
        <div className="loading-dashboard-details">
          <Skeleton.Input active size="large" className="skeleton-1" />
          <Skeleton.Input active size="large" className="skeleton-1" />
@@ -749,7 +693,7 @@ function DashboardsList(): JSX.Element {
          <ArrowUpRight size={16} className="learn-more-arrow" />
        </section>
      </div>
      ) : dashboards?.length === 0 && !searchString ? (
|
||||
) : dashboards.length === 0 && !searchString ? (
|
||||
<div className="dashboard-empty-state">
|
||||
<img
|
||||
src="/Icons/dashboards.svg"
|
||||
@@ -831,7 +775,7 @@ function DashboardsList(): JSX.Element {
|
||||
)}
|
||||
</div>
|
||||
|
||||
{dashboards?.length === 0 ? (
|
||||
{dashboards.length === 0 ? (
|
||||
<div className="no-search">
|
||||
<img src="/Icons/emptyState.svg" alt="img" className="img" />
|
||||
<Typography.Text className="text">
|
||||
@@ -860,7 +804,7 @@ function DashboardsList(): JSX.Element {
|
||||
data-testid="sort-by-last-created"
|
||||
>
|
||||
Last created
|
||||
{sortOrder.columnKey === 'createdAt' && <Check size={14} />}
|
||||
{listSortOrder.columnKey === 'createdAt' && <Check size={14} />}
|
||||
</Button>
|
||||
<Button
|
||||
type="text"
|
||||
@@ -869,7 +813,7 @@ function DashboardsList(): JSX.Element {
|
||||
data-testid="sort-by-last-updated"
|
||||
>
|
||||
Last updated
|
||||
{sortOrder.columnKey === 'updatedAt' && <Check size={14} />}
|
||||
{listSortOrder.columnKey === 'updatedAt' && <Check size={14} />}
|
||||
</Button>
|
||||
</div>
|
||||
}
|
||||
@@ -911,11 +855,7 @@ function DashboardsList(): JSX.Element {
|
||||
columns={columns}
|
||||
dataSource={data}
|
||||
showSorterTooltip
|
||||
loading={
|
||||
isDashboardListLoading ||
|
||||
isFilteringDashboards ||
|
||||
isDashboardListRefetching
|
||||
}
|
||||
loading={isDashboardListFetching}
|
||||
showHeader={false}
|
||||
pagination={paginationConfig}
|
||||
/>
|
||||
@@ -964,12 +904,12 @@ function DashboardsList(): JSX.Element {
|
||||
<div className="configure-preview">
|
||||
<section className="header">
|
||||
<img
|
||||
src={dashboards?.[0]?.data?.image || Base64Icons[0]}
|
||||
src={dashboards[0]?.data?.image || Base64Icons[0]}
|
||||
alt="dashboard-image"
|
||||
style={{ height: '14px', width: '14px' }}
|
||||
/>
|
||||
<Typography.Text className="title">
|
||||
{dashboards?.[0]?.data?.title}
|
||||
{dashboards[0]?.data?.title}
|
||||
</Typography.Text>
|
||||
</section>
|
||||
<section className="details">
|
||||
@@ -977,16 +917,16 @@ function DashboardsList(): JSX.Element {
|
||||
{visibleColumns.createdAt && (
|
||||
<Typography.Text className="formatted-time">
|
||||
<CalendarClock size={14} />
|
||||
{getFormattedTime(dashboards?.[0] as Dashboard, 'created_at')}
|
||||
{getFormattedTime(dashboards[0] as Dashboard, 'created_at')}
|
||||
</Typography.Text>
|
||||
)}
|
||||
{visibleColumns.createdBy && (
|
||||
<div className="user">
|
||||
<Typography.Text className="user-tag">
|
||||
{dashboards?.[0]?.createdBy?.substring(0, 1).toUpperCase()}
|
||||
{dashboards[0]?.createdBy?.substring(0, 1).toUpperCase()}
|
||||
</Typography.Text>
|
||||
<Typography.Text className="dashboard-created-by">
|
||||
{dashboards?.[0]?.createdBy}
|
||||
{dashboards[0]?.createdBy}
|
||||
</Typography.Text>
|
||||
</div>
|
||||
)}
|
||||
@@ -995,16 +935,16 @@ function DashboardsList(): JSX.Element {
|
||||
{visibleColumns.updatedAt && (
|
||||
<Typography.Text className="formatted-time">
|
||||
<CalendarClock size={14} />
|
||||
{onLastUpdated(dashboards?.[0]?.updatedAt || '')}
|
||||
{onLastUpdated(dashboards[0]?.updatedAt || '')}
|
||||
</Typography.Text>
|
||||
)}
|
||||
{visibleColumns.updatedBy && (
|
||||
<div className="user">
|
||||
<Typography.Text className="user-tag">
|
||||
{dashboards?.[0]?.updatedBy?.substring(0, 1).toUpperCase()}
|
||||
{dashboards[0]?.updatedBy?.substring(0, 1).toUpperCase()}
|
||||
</Typography.Text>
|
||||
<Typography.Text className="dashboard-created-by">
|
||||
{dashboards?.[0]?.updatedBy}
|
||||
{dashboards[0]?.updatedBy}
|
||||
</Typography.Text>
|
||||
</div>
|
||||
)}
|
||||
|
||||
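Reviewer note: a minimal sketch (not part of the PR) of the derived-state pattern this hunk adopts. The visible list is computed with useMemo from the fetched data and the active sort column instead of being copied into component state; the hook name and the pared-down Dashboard shape below are illustrative assumptions.

import { useMemo } from 'react';

// Illustrative minimal shape; the real Dashboard type lives in the app.
interface DashboardLike {
	createdAt: string;
	updatedAt: string;
}

function useSortedDashboards<T extends DashboardLike>(
	data: T[] | undefined,
	columnKey: 'createdAt' | 'updatedAt',
): T[] {
	return useMemo(() => {
		// Copy before sorting so the cached query result is never mutated.
		const list = [...(data ?? [])];
		return list.sort(
			(a, b) => new Date(b[columnKey]).getTime() - new Date(a[columnKey]).getTime(),
		);
	}, [data, columnKey]);
}
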
@@ -1,6 +1,7 @@
import { ReactNode, useState } from 'react';
import { ReactNode, useCallback, useEffect, useRef, useState } from 'react';
import MEditor, { EditorProps, Monaco } from '@monaco-editor/react';
import { Color } from '@signozhq/design-tokens';
import type { InputRef } from 'antd';
import {
Button,
Collapse,
@@ -46,12 +47,23 @@ function Overview({
handleChangeSelectedView,
}: Props): JSX.Element {
const [isWrapWord, setIsWrapWord] = useState<boolean>(true);
const [isSearchVisible, setIsSearchVisible] = useState<boolean>(false);
const [isSearchVisible, setIsSearchVisible] = useState<boolean>(true);
const [isAttributesExpanded, setIsAttributesExpanded] = useState<boolean>(
true,
);
const [fieldSearchInput, setFieldSearchInput] = useState<string>('');

const focusTimerRef = useRef<ReturnType<typeof setTimeout>>();

const searchInputRef = useCallback((node: InputRef | null) => {
clearTimeout(focusTimerRef.current);
if (node) {
focusTimerRef.current = setTimeout(() => node.focus(), 100);
}
}, []);

useEffect(() => (): void => clearTimeout(focusTimerRef.current), []);

const isDarkMode = useIsDarkMode();

const options: EditorProps['options'] = {
@@ -196,7 +208,7 @@ function Overview({
<>
{isSearchVisible && (
<Input
autoFocus
ref={searchInputRef}
placeholder="Search for a field..."
className="search-input"
value={fieldSearchInput}

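Reviewer note: the focus wiring above, isolated into a self-contained sketch (not part of the PR, assuming only React and antd's InputRef). The callback ref defers focus with a timer; the timer is cleared on re-attach and on unmount.

import { useCallback, useEffect, useRef } from 'react';
import type { InputRef } from 'antd';

function useDeferredFocusRef(delayMs = 100): (node: InputRef | null) => void {
	const timerRef = useRef<ReturnType<typeof setTimeout>>();

	// Cancel any pending focus when the component unmounts.
	useEffect(() => (): void => clearTimeout(timerRef.current), []);

	return useCallback(
		(node: InputRef | null) => {
			clearTimeout(timerRef.current);
			if (node) {
				// Defer focus so it lands after the input is mounted and visible.
				timerRef.current = setTimeout(() => node.focus(), delayMs);
			}
		},
		[delayMs],
	);
}
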
@@ -0,0 +1,34 @@
import { Color } from '@signozhq/design-tokens';

import { getColorsForSeverityLabels, isRedLike } from '../utils';

describe('getColorsForSeverityLabels', () => {
it('should return slate for blank labels', () => {
expect(getColorsForSeverityLabels('', 0)).toBe(Color.BG_SLATE_300);
expect(getColorsForSeverityLabels(' ', 0)).toBe(Color.BG_SLATE_300);
});

it('should return correct colors for known severity variants', () => {
expect(getColorsForSeverityLabels('INFO', 0)).toBe(Color.BG_ROBIN_600);
expect(getColorsForSeverityLabels('ERROR', 0)).toBe(Color.BG_CHERRY_600);
expect(getColorsForSeverityLabels('WARN', 0)).toBe(Color.BG_AMBER_600);
expect(getColorsForSeverityLabels('DEBUG', 0)).toBe(Color.BG_AQUA_600);
expect(getColorsForSeverityLabels('TRACE', 0)).toBe(Color.BG_FOREST_600);
expect(getColorsForSeverityLabels('FATAL', 0)).toBe(Color.BG_SAKURA_600);
});

it('should return non-red colors for unrecognized labels at any index', () => {
for (let i = 0; i < 30; i++) {
const color = getColorsForSeverityLabels('4', i);
expect(isRedLike(color)).toBe(false);
}
});

it('should return non-red colors for numeric severity text', () => {
const numericLabels = ['1', '2', '4', '9', '13', '17', '21'];
numericLabels.forEach((label) => {
const color = getColorsForSeverityLabels(label, 0);
expect(isRedLike(color)).toBe(false);
});
});
});
@@ -1,7 +1,16 @@
import { Color } from '@signozhq/design-tokens';
import { themeColors } from 'constants/theme';
import { colors } from 'lib/getRandomColor';

// Function to determine if a color is "red-like" based on its RGB values
export function isRedLike(hex: string): boolean {
const r = parseInt(hex.slice(1, 3), 16);
const g = parseInt(hex.slice(3, 5), 16);
const b = parseInt(hex.slice(5, 7), 16);
return r > 180 && r > g * 1.4 && r > b * 1.4;
}

const SAFE_FALLBACK_COLORS = colors.filter((c) => !isRedLike(c));

const SEVERITY_VARIANT_COLORS: Record<string, string> = {
TRACE: Color.BG_FOREST_600,
Trace: Color.BG_FOREST_500,
@@ -67,8 +76,13 @@ export function getColorsForSeverityLabels(
label: string,
index: number,
): string {
// Check if we have a direct mapping for this severity variant
const variantColor = SEVERITY_VARIANT_COLORS[label.trim()];
const trimmed = label.trim();

if (!trimmed) {
return Color.BG_SLATE_300;
}

const variantColor = SEVERITY_VARIANT_COLORS[trimmed];
if (variantColor) {
return variantColor;
}
@@ -103,5 +117,8 @@ export function getColorsForSeverityLabels(
return Color.BG_SAKURA_500;
}

return colors[index % colors.length] || themeColors.red;
return (
SAFE_FALLBACK_COLORS[index % SAFE_FALLBACK_COLORS.length] ||
Color.BG_SLATE_400
);
}

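Reviewer note: a quick illustration (not part of the PR) of the red-likeness heuristic above; the hex values are made up for the example.

import { isRedLike } from './utils';

// '#e81b3c': r=232 > 180, and r exceeds 1.4x both g (27) and b (60) -> red-like.
console.log(isRedLike('#e81b3c')); // true
// '#4e74f8': r=78 fails the r > 180 check -> not red-like, safe as a fallback color.
console.log(isRedLike('#4e74f8')); // false
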
@@ -111,23 +111,19 @@ const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
);

const itemContent = useCallback(
(index: number, log: Record<string, unknown>): JSX.Element => {
return (
<div key={log.id as string}>
<TableRow
tableColumns={tableColumns}
index={index}
log={log}
logs={tableViewProps.logs}
hasActions
fontSize={tableViewProps.fontSize}
onShowLogDetails={onSetActiveLog}
isActiveLog={activeLog?.id === log.id}
onClearActiveLog={onCloseActiveLog}
/>
</div>
);
},
(index: number, log: Record<string, unknown>): JSX.Element => (
<TableRow
tableColumns={tableColumns}
index={index}
log={log}
logs={tableViewProps.logs}
hasActions
fontSize={tableViewProps.fontSize}
onShowLogDetails={onSetActiveLog}
isActiveLog={activeLog?.id === log.id}
onClearActiveLog={onCloseActiveLog}
/>
),
[
tableColumns,
onSetActiveLog,
@@ -143,7 +139,8 @@ const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
{tableColumns
.filter((column) => column.key)
.map((column) => {
const isDragColumn = column.key !== 'expand';
const isDragColumn =
column.key !== 'expand' && column.key !== 'state-indicator';

return (
<TableHeaderCellStyled

@@ -18,8 +18,8 @@ jest.mock('lib/query/createTableColumnsFromQuery', () => ({
jest.mock('container/NewWidget/utils', () => ({
unitOptions: jest.fn(() => [
{ value: 'none', label: 'None' },
{ value: 'percent', label: 'Percent' },
{ value: 'ms', label: 'Milliseconds' },
{ value: '%', label: 'Percent (0 - 100)' },
{ value: 'ms', label: 'Milliseconds (ms)' },
]),
}));

@@ -39,7 +39,7 @@ const defaultProps = {
],
thresholdTableOptions: 'cpu_usage',
columnUnits: { cpu_usage: 'percent', memory_usage: 'bytes' },
yAxisUnit: 'percent',
yAxisUnit: '%',
moveThreshold: jest.fn(),
};

@@ -1,99 +0,0 @@
import { Dispatch, SetStateAction, useEffect, useState } from 'react';
import { AutoComplete, Input, Typography } from 'antd';
import { find } from 'lodash-es';

import { flattenedCategories } from './dataFormatCategories';

const findCategoryById = (
searchValue: string,
): Record<string, string> | undefined =>
find(flattenedCategories, (option) => option.id === searchValue);
const findCategoryByName = (
searchValue: string,
): Record<string, string> | undefined =>
find(flattenedCategories, (option) => option.name === searchValue);

type OnSelectType = Dispatch<SetStateAction<string>> | ((val: string) => void);

/**
* @deprecated Use DashboardYAxisUnitSelectorWrapper instead.
*/
function YAxisUnitSelector({
value,
onSelect,
fieldLabel,
handleClear,
}: {
value: string;
onSelect: OnSelectType;
fieldLabel: string;
handleClear?: () => void;
}): JSX.Element {
const [inputValue, setInputValue] = useState('');

// Sync input value with the actual value prop
useEffect(() => {
const category = findCategoryById(value);
setInputValue(category?.name || '');
}, [value]);

const onSelectHandler = (selectedValue: string): void => {
const category = findCategoryByName(selectedValue);
if (category) {
onSelect(category.id);
setInputValue(selectedValue);
}
};

const onChangeHandler = (inputValue: string): void => {
setInputValue(inputValue);
// Clear the yAxisUnit if input is empty or doesn't match any option
if (!inputValue) {
onSelect('');
}
};

const onClearHandler = (): void => {
setInputValue('');
onSelect('');
if (handleClear) {
handleClear();
}
};

const options = flattenedCategories.map((options) => ({
value: options.name,
}));

return (
<div className="y-axis-unit-selector">
<Typography.Text className="heading">{fieldLabel}</Typography.Text>
<AutoComplete
style={{ width: '100%' }}
rootClassName="y-axis-root-popover"
options={options}
allowClear
value={inputValue}
onChange={onChangeHandler}
onClear={onClearHandler}
onSelect={onSelectHandler}
filterOption={(inputValue, option): boolean => {
if (option) {
return (
option.value.toUpperCase().indexOf(inputValue.toUpperCase()) !== -1
);
}
return false;
}}
>
<Input placeholder="Unit" rootClassName="input" />
</AutoComplete>
</div>
);
}

export default YAxisUnitSelector;

YAxisUnitSelector.defaultProps = {
handleClear: (): void => {},
};
@@ -1,240 +0,0 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { act } from 'react-dom/test-utils';
import { render, screen, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';

import YAxisUnitSelector from '../YAxisUnitSelector';

// Mock the dataFormatCategories to have predictable test data
jest.mock('../dataFormatCategories', () => ({
flattenedCategories: [
{ id: 'seconds', name: 'seconds (s)' },
{ id: 'milliseconds', name: 'milliseconds (ms)' },
{ id: 'hours', name: 'hours (h)' },
{ id: 'minutes', name: 'minutes (m)' },
],
}));

const MOCK_SECONDS = 'seconds';
const MOCK_MILLISECONDS = 'milliseconds';

describe('YAxisUnitSelector', () => {
const defaultProps = {
value: MOCK_SECONDS,
onSelect: jest.fn(),
fieldLabel: 'Y Axis Unit',
handleClear: jest.fn(),
};

let user: ReturnType<typeof userEvent.setup>;

beforeEach(() => {
jest.clearAllMocks();
user = userEvent.setup();
});

afterEach(() => {
jest.restoreAllMocks();
});

describe('Rendering (Read) & (write)', () => {
it('renders with correct field label', () => {
render(
<YAxisUnitSelector
value={defaultProps.value}
onSelect={defaultProps.onSelect}
fieldLabel={defaultProps.fieldLabel}
handleClear={defaultProps.handleClear}
/>,
);
expect(screen.getByText('Y Axis Unit')).toBeInTheDocument();
const input = screen.getByRole('combobox');

expect(input).toHaveValue('seconds (s)');
});

it('renders with custom field label', () => {
render(
<YAxisUnitSelector
value={defaultProps.value}
onSelect={defaultProps.onSelect}
fieldLabel="Custom Unit Label"
handleClear={defaultProps.handleClear}
/>,
);
expect(screen.getByText('Custom Unit Label')).toBeInTheDocument();
});

it('displays empty input when value prop is empty', () => {
render(
<YAxisUnitSelector
value=""
onSelect={defaultProps.onSelect}
fieldLabel={defaultProps.fieldLabel}
handleClear={defaultProps.handleClear}
/>,
);
expect(screen.getByDisplayValue('')).toBeInTheDocument();
});

it('shows placeholder text', () => {
render(
<YAxisUnitSelector
value={defaultProps.value}
onSelect={defaultProps.onSelect}
fieldLabel={defaultProps.fieldLabel}
handleClear={defaultProps.handleClear}
/>,
);
expect(screen.getByPlaceholderText('Unit')).toBeInTheDocument();
});

it('handles numeric input', async () => {
render(
<YAxisUnitSelector
value={defaultProps.value}
onSelect={defaultProps.onSelect}
fieldLabel={defaultProps.fieldLabel}
handleClear={defaultProps.handleClear}
/>,
);
const input = screen.getByRole('combobox');

await user.clear(input);
await user.type(input, '12345');
expect(input).toHaveValue('12345');
});

it('handles mixed content input', async () => {
render(
<YAxisUnitSelector
value={defaultProps.value}
onSelect={defaultProps.onSelect}
fieldLabel={defaultProps.fieldLabel}
handleClear={defaultProps.handleClear}
/>,
);
const input = screen.getByRole('combobox');

await user.clear(input);
await user.type(input, 'Test123!@#');
expect(input).toHaveValue('Test123!@#');
});
});

describe('State Management', () => {
it('syncs input value with value prop changes', async () => {
const { rerender } = render(
<YAxisUnitSelector
value={defaultProps.value}
onSelect={defaultProps.onSelect}
fieldLabel={defaultProps.fieldLabel}
handleClear={defaultProps.handleClear}
/>,
);
const input = screen.getByRole('combobox');

// Initial value
expect(input).toHaveValue('seconds (s)');

// Change value prop
rerender(
<YAxisUnitSelector
value={MOCK_MILLISECONDS}
onSelect={defaultProps.onSelect}
fieldLabel={defaultProps.fieldLabel}
handleClear={defaultProps.handleClear}
/>,
);

await waitFor(() => {
expect(input).toHaveValue('milliseconds (ms)');
});
});

it('handles empty value prop correctly', async () => {
const { rerender } = render(
<YAxisUnitSelector
value={defaultProps.value}
onSelect={defaultProps.onSelect}
fieldLabel={defaultProps.fieldLabel}
handleClear={defaultProps.handleClear}
/>,
);
const input = screen.getByRole('combobox');

// Change to empty value
rerender(
<YAxisUnitSelector
value=""
onSelect={defaultProps.onSelect}
fieldLabel={defaultProps.fieldLabel}
handleClear={defaultProps.handleClear}
/>,
);

await waitFor(() => {
expect(input).toHaveValue('');
});
});

it('handles invalid value prop gracefully', async () => {
const { rerender } = render(
<YAxisUnitSelector
value={defaultProps.value}
onSelect={defaultProps.onSelect}
fieldLabel={defaultProps.fieldLabel}
handleClear={defaultProps.handleClear}
/>,
);
const input = screen.getByRole('combobox');

// Change to invalid value
rerender(
<YAxisUnitSelector
value="invalid_id"
onSelect={defaultProps.onSelect}
fieldLabel={defaultProps.fieldLabel}
handleClear={defaultProps.handleClear}
/>,
);

await waitFor(() => {
expect(input).toHaveValue('');
});
});

it('maintains local state during typing', async () => {
render(
<YAxisUnitSelector
value={defaultProps.value}
onSelect={defaultProps.onSelect}
fieldLabel={defaultProps.fieldLabel}
handleClear={defaultProps.handleClear}
/>,
);
const input = screen.getByRole('combobox');

// first clear then type
await user.clear(input);
await user.type(input, 'test');
expect(input).toHaveValue('test');

// Value prop change should not override local typing
await act(async () => {
// Simulate prop change
render(
<YAxisUnitSelector
value="bytes"
onSelect={defaultProps.onSelect}
fieldLabel={defaultProps.fieldLabel}
handleClear={defaultProps.handleClear}
/>,
);
});

// Local typing should be preserved
expect(input).toHaveValue('test');
});
});
});
@@ -1,613 +1,53 @@
import { flattenDeep } from 'lodash-es';

import {
AccelerationFormats,
AngularFormats,
AreaFormats,
BooleanFormats,
CategoryNames,
ConcentrationFormats,
CurrencyFormats,
DataFormats,
DataRateFormats,
DataTypeCategories,
DatetimeFormats,
FlopsFormats,
FlowFormats,
ForceFormats,
HashRateFormats,
LengthFormats,
MassFormats,
MiscellaneousFormats,
PowerElectricalFormats,
PressureFormats,
RadiationFormats,
RotationSpeedFormats,
TemperatureFormats,
ThroughputFormats,
TimeFormats,
VelocityFormats,
VolumeFormats,
} from './types';
UniversalUnitToGrafanaUnit,
YAxisCategoryNames,
} from 'components/YAxisUnitSelector/constants';
import { YAxisSource } from 'components/YAxisUnitSelector/types';
import { getYAxisCategories } from 'components/YAxisUnitSelector/utils';
import { convertValue } from 'lib/getConvertedValue';

export const dataTypeCategories: DataTypeCategories = [
{
name: CategoryNames.Time,
formats: [
{ name: 'Hertz (1/s)', id: TimeFormats.Hertz },
{ name: 'nanoseconds (ns)', id: TimeFormats.Nanoseconds },
{ name: 'microseconds (µs)', id: TimeFormats.Microseconds },
{ name: 'milliseconds (ms)', id: TimeFormats.Milliseconds },
{ name: 'seconds (s)', id: TimeFormats.Seconds },
{ name: 'minutes (m)', id: TimeFormats.Minutes },
{ name: 'hours (h)', id: TimeFormats.Hours },
{ name: 'days (d)', id: TimeFormats.Days },
{ name: 'duration in ms (dtdurationms)', id: TimeFormats.DurationMs },
{ name: 'duration in s (dtdurations)', id: TimeFormats.DurationS },
{ name: 'duration in h:m:s (dthms)', id: TimeFormats.DurationHms },
{ name: 'duration in d:h:m:s (dtdhms)', id: TimeFormats.DurationDhms },
{ name: 'timeticks (timeticks)', id: TimeFormats.Timeticks },
{ name: 'clock in ms (clockms)', id: TimeFormats.ClockMs },
{ name: 'clock in s (clocks)', id: TimeFormats.ClockS },
],
},
{
name: CategoryNames.Throughput,
formats: [
{ name: 'counts/sec (cps)', id: ThroughputFormats.CountsPerSec },
{ name: 'ops/sec (ops)', id: ThroughputFormats.OpsPerSec },
{ name: 'requests/sec (reqps)', id: ThroughputFormats.RequestsPerSec },
{ name: 'reads/sec (rps)', id: ThroughputFormats.ReadsPerSec },
{ name: 'writes/sec (wps)', id: ThroughputFormats.WritesPerSec },
{ name: 'I/O operations/sec (iops)', id: ThroughputFormats.IOOpsPerSec },
{ name: 'counts/min (cpm)', id: ThroughputFormats.CountsPerMin },
{ name: 'ops/min (opm)', id: ThroughputFormats.OpsPerMin },
{ name: 'reads/min (rpm)', id: ThroughputFormats.ReadsPerMin },
{ name: 'writes/min (wpm)', id: ThroughputFormats.WritesPerMin },
],
},
{
name: CategoryNames.Data,
formats: [
{ name: 'bytes(IEC)', id: DataFormats.BytesIEC },
{ name: 'bytes(SI)', id: DataFormats.BytesSI },
{ name: 'bits(IEC)', id: DataFormats.BitsIEC },
{ name: 'bits(SI)', id: DataFormats.BitsSI },
{ name: 'kibibytes', id: DataFormats.KibiBytes },
{ name: 'kilobytes', id: DataFormats.KiloBytes },
{ name: 'mebibytes', id: DataFormats.MebiBytes },
{ name: 'megabytes', id: DataFormats.MegaBytes },
{ name: 'gibibytes', id: DataFormats.GibiBytes },
{ name: 'gigabytes', id: DataFormats.GigaBytes },
{ name: 'tebibytes', id: DataFormats.TebiBytes },
{ name: 'terabytes', id: DataFormats.TeraBytes },
{ name: 'pebibytes', id: DataFormats.PebiBytes },
{ name: 'petabytes', id: DataFormats.PetaBytes },
],
},
{
name: CategoryNames.DataRate,
formats: [
{ name: 'packets/sec', id: DataRateFormats.PacketsPerSec },
{ name: 'bytes/sec(IEC)', id: DataRateFormats.BytesPerSecIEC },
{ name: 'bytes/sec(SI)', id: DataRateFormats.BytesPerSecSI },
{ name: 'bits/sec(IEC)', id: DataRateFormats.BitsPerSecIEC },
{ name: 'bits/sec(SI)', id: DataRateFormats.BitsPerSecSI },
{ name: 'kibibytes/sec', id: DataRateFormats.KibiBytesPerSec },
{ name: 'kibibits/sec', id: DataRateFormats.KibiBitsPerSec },
{ name: 'kilobytes/sec', id: DataRateFormats.KiloBytesPerSec },
{ name: 'kilobits/sec', id: DataRateFormats.KiloBitsPerSec },
{ name: 'mebibytes/sec', id: DataRateFormats.MebiBytesPerSec },
{ name: 'mebibits/sec', id: DataRateFormats.MebiBitsPerSec },
{ name: 'megabytes/sec', id: DataRateFormats.MegaBytesPerSec },
{ name: 'megabits/sec', id: DataRateFormats.MegaBitsPerSec },
{ name: 'gibibytes/sec', id: DataRateFormats.GibiBytesPerSec },
{ name: 'gibibits/sec', id: DataRateFormats.GibiBitsPerSec },
{ name: 'gigabytes/sec', id: DataRateFormats.GigaBytesPerSec },
{ name: 'gigabits/sec', id: DataRateFormats.GigaBitsPerSec },
{ name: 'tebibytes/sec', id: DataRateFormats.TebiBytesPerSec },
{ name: 'tebibits/sec', id: DataRateFormats.TebiBitsPerSec },
{ name: 'terabytes/sec', id: DataRateFormats.TeraBytesPerSec },
{ name: 'terabits/sec', id: DataRateFormats.TeraBitsPerSec },
{ name: 'pebibytes/sec', id: DataRateFormats.PebiBytesPerSec },
{ name: 'pebibits/sec', id: DataRateFormats.PebiBitsPerSec },
{ name: 'petabytes/sec', id: DataRateFormats.PetaBytesPerSec },
{ name: 'petabits/sec', id: DataRateFormats.PetaBitsPerSec },
],
},
{
name: CategoryNames.HashRate,
formats: [
{ name: 'hashes/sec', id: HashRateFormats.HashesPerSec },
{ name: 'kilohashes/sec', id: HashRateFormats.KiloHashesPerSec },
{ name: 'megahashes/sec', id: HashRateFormats.MegaHashesPerSec },
{ name: 'gigahashes/sec', id: HashRateFormats.GigaHashesPerSec },
{ name: 'terahashes/sec', id: HashRateFormats.TeraHashesPerSec },
{ name: 'petahashes/sec', id: HashRateFormats.PetaHashesPerSec },
{ name: 'exahashes/sec', id: HashRateFormats.ExaHashesPerSec },
],
},
{
name: CategoryNames.Miscellaneous,
formats: [
{ name: 'none', id: MiscellaneousFormats.None },
{ name: 'String', id: MiscellaneousFormats.String },
{ name: 'short', id: MiscellaneousFormats.Short },
{ name: 'Percent (0-100)', id: MiscellaneousFormats.Percent },
{ name: 'Percent (0.0-1.0)', id: MiscellaneousFormats.PercentUnit },
{ name: 'Humidity (%H)', id: MiscellaneousFormats.Humidity },
{ name: 'Decibel', id: MiscellaneousFormats.Decibel },
{ name: 'Hexadecimal (0x)', id: MiscellaneousFormats.Hexadecimal0x },
{ name: 'Hexadecimal', id: MiscellaneousFormats.Hexadecimal },
{ name: 'Scientific notation', id: MiscellaneousFormats.ScientificNotation },
{ name: 'Locale format', id: MiscellaneousFormats.LocaleFormat },
{ name: 'Pixels', id: MiscellaneousFormats.Pixels },
],
},
{
name: CategoryNames.Acceleration,
formats: [
{ name: 'Meters/sec²', id: AccelerationFormats.MetersPerSecondSquared },
{ name: 'Feet/sec²', id: AccelerationFormats.FeetPerSecondSquared },
{ name: 'G unit', id: AccelerationFormats.GUnit },
],
},
{
name: CategoryNames.Angle,
formats: [
{ name: 'Degrees (°)', id: AngularFormats.Degree },
{ name: 'Radians', id: AngularFormats.Radian },
{ name: 'Gradian', id: AngularFormats.Gradian },
{ name: 'Arc Minutes', id: AngularFormats.ArcMinute },
{ name: 'Arc Seconds', id: AngularFormats.ArcSecond },
],
},
{
name: CategoryNames.Area,
formats: [
{ name: 'Square Meters (m²)', id: AreaFormats.SquareMeters },
{ name: 'Square Feet (ft²)', id: AreaFormats.SquareFeet },
{ name: 'Square Miles (mi²)', id: AreaFormats.SquareMiles },
],
},
{
name: CategoryNames.Computation,
formats: [
{ name: 'FLOP/s', id: FlopsFormats.FLOPs },
{ name: 'MFLOP/s', id: FlopsFormats.MFLOPs },
{ name: 'GFLOP/s', id: FlopsFormats.GFLOPs },
{ name: 'TFLOP/s', id: FlopsFormats.TFLOPs },
{ name: 'PFLOP/s', id: FlopsFormats.PFLOPs },
{ name: 'EFLOP/s', id: FlopsFormats.EFLOPs },
{ name: 'ZFLOP/s', id: FlopsFormats.ZFLOPs },
{ name: 'YFLOP/s', id: FlopsFormats.YFLOPs },
],
},
{
name: CategoryNames.Concentration,
formats: [
{ name: 'parts-per-million (ppm)', id: ConcentrationFormats.PPM },
{ name: 'parts-per-billion (ppb)', id: ConcentrationFormats.PPB },
{ name: 'nanogram per cubic meter (ng/m³)', id: ConcentrationFormats.NgM3 },
{
name: 'nanogram per normal cubic meter (ng/Nm³)',
id: ConcentrationFormats.NgNM3,
},
{ name: 'microgram per cubic meter (μg/m³)', id: ConcentrationFormats.UgM3 },
{
name: 'microgram per normal cubic meter (μg/Nm³)',
id: ConcentrationFormats.UgNM3,
},
{ name: 'milligram per cubic meter (mg/m³)', id: ConcentrationFormats.MgM3 },
{
name: 'milligram per normal cubic meter (mg/Nm³)',
id: ConcentrationFormats.MgNM3,
},
{ name: 'gram per cubic meter (g/m³)', id: ConcentrationFormats.GM3 },
{
name: 'gram per normal cubic meter (g/Nm³)',
id: ConcentrationFormats.GNM3,
},
{ name: 'milligrams per decilitre (mg/dL)', id: ConcentrationFormats.MgDL },
{ name: 'millimoles per litre (mmol/L)', id: ConcentrationFormats.MmolL },
],
},
{
name: CategoryNames.Currency,
formats: [
{ name: 'Dollars ($)', id: CurrencyFormats.USD },
{ name: 'Pounds (£)', id: CurrencyFormats.GBP },
{ name: 'Euro (€)', id: CurrencyFormats.EUR },
{ name: 'Yen (¥)', id: CurrencyFormats.JPY },
{ name: 'Rubles (₽)', id: CurrencyFormats.RUB },
{ name: 'Hryvnias (₴)', id: CurrencyFormats.UAH },
{ name: 'Real (R$)', id: CurrencyFormats.BRL },
{ name: 'Danish Krone (kr)', id: CurrencyFormats.DKK },
{ name: 'Icelandic Króna (kr)', id: CurrencyFormats.ISK },
{ name: 'Norwegian Krone (kr)', id: CurrencyFormats.NOK },
{ name: 'Swedish Krona (kr)', id: CurrencyFormats.SEK },
{ name: 'Czech koruna (czk)', id: CurrencyFormats.CZK },
{ name: 'Swiss franc (CHF)', id: CurrencyFormats.CHF },
{ name: 'Polish Złoty (PLN)', id: CurrencyFormats.PLN },
{ name: 'Bitcoin (฿)', id: CurrencyFormats.BTC },
{ name: 'Milli Bitcoin (฿)', id: CurrencyFormats.MBTC },
{ name: 'Micro Bitcoin (฿)', id: CurrencyFormats.UBTC },
{ name: 'South African Rand (R)', id: CurrencyFormats.ZAR },
{ name: 'Indian Rupee (₹)', id: CurrencyFormats.INR },
{ name: 'South Korean Won (₩)', id: CurrencyFormats.KRW },
{ name: 'Indonesian Rupiah (Rp)', id: CurrencyFormats.IDR },
{ name: 'Philippine Peso (PHP)', id: CurrencyFormats.PHP },
{ name: 'Vietnamese Dong (VND)', id: CurrencyFormats.VND },
],
},
{
name: CategoryNames.Datetime,
formats: [
{ name: 'Datetime ISO', id: DatetimeFormats.ISO },
{
name: 'Datetime ISO (No date if today)',
id: DatetimeFormats.ISONoDateIfToday,
},
{ name: 'Datetime US', id: DatetimeFormats.US },
{
name: 'Datetime US (No date if today)',
id: DatetimeFormats.USNoDateIfToday,
},
{ name: 'Datetime local', id: DatetimeFormats.Local },
{
name: 'Datetime local (No date if today)',
id: DatetimeFormats.LocalNoDateIfToday,
},
{ name: 'Datetime default', id: DatetimeFormats.System },
{ name: 'From Now', id: DatetimeFormats.FromNow },
],
},
{
name: CategoryNames.Energy,
formats: [
{ name: 'Watt (W)', id: PowerElectricalFormats.WATT },
{ name: 'Kilowatt (kW)', id: PowerElectricalFormats.KWATT },
{ name: 'Megawatt (MW)', id: PowerElectricalFormats.MEGWATT },
{ name: 'Gigawatt (GW)', id: PowerElectricalFormats.GWATT },
{ name: 'Milliwatt (mW)', id: PowerElectricalFormats.MWATT },
{ name: 'Watt per square meter (W/m²)', id: PowerElectricalFormats.WM2 },
{ name: 'Volt-Ampere (VA)', id: PowerElectricalFormats.VOLTAMP },
{ name: 'Kilovolt-Ampere (kVA)', id: PowerElectricalFormats.KVOLTAMP },
{
name: 'Volt-Ampere reactive (VAr)',
id: PowerElectricalFormats.VOLTAMPREACT,
},
{
name: 'Kilovolt-Ampere reactive (kVAr)',
id: PowerElectricalFormats.KVOLTAMPREACT,
},
{ name: 'Watt-hour (Wh)', id: PowerElectricalFormats.WATTH },
{
name: 'Watt-hour per Kilogram (Wh/kg)',
id: PowerElectricalFormats.WATTHPERKG,
},
{ name: 'Kilowatt-hour (kWh)', id: PowerElectricalFormats.KWATTH },
{ name: 'Kilowatt-min (kWm)', id: PowerElectricalFormats.KWATTM },
{ name: 'Ampere-hour (Ah)', id: PowerElectricalFormats.AMPH },
{ name: 'Kiloampere-hour (kAh)', id: PowerElectricalFormats.KAMPH },
{ name: 'Milliampere-hour (mAh)', id: PowerElectricalFormats.MAMPH },
{ name: 'Joule (J)', id: PowerElectricalFormats.JOULE },
{ name: 'Electron volt (eV)', id: PowerElectricalFormats.EV },
{ name: 'Ampere (A)', id: PowerElectricalFormats.AMP },
{ name: 'Kiloampere (kA)', id: PowerElectricalFormats.KAMP },
{ name: 'Milliampere (mA)', id: PowerElectricalFormats.MAMP },
{ name: 'Volt (V)', id: PowerElectricalFormats.VOLT },
{ name: 'Kilovolt (kV)', id: PowerElectricalFormats.KVOLT },
{ name: 'Millivolt (mV)', id: PowerElectricalFormats.MVOLT },
{ name: 'Decibel-milliwatt (dBm)', id: PowerElectricalFormats.DBM },
{ name: 'Ohm (Ω)', id: PowerElectricalFormats.OHM },
{ name: 'Kiloohm (kΩ)', id: PowerElectricalFormats.KOHM },
{ name: 'Megaohm (MΩ)', id: PowerElectricalFormats.MOHM },
{ name: 'Farad (F)', id: PowerElectricalFormats.FARAD },
{ name: 'Microfarad (µF)', id: PowerElectricalFormats.µFARAD },
{ name: 'Nanofarad (nF)', id: PowerElectricalFormats.NFARAD },
{ name: 'Picofarad (pF)', id: PowerElectricalFormats.PFARAD },
{ name: 'Femtofarad (fF)', id: PowerElectricalFormats.FFARAD },
{ name: 'Henry (H)', id: PowerElectricalFormats.HENRY },
{ name: 'Millihenry (mH)', id: PowerElectricalFormats.MHENRY },
{ name: 'Microhenry (µH)', id: PowerElectricalFormats.µHENRY },
{ name: 'Lumens (Lm)', id: PowerElectricalFormats.LUMENS },
],
},
{
name: CategoryNames.Flow,
formats: [
{ name: 'Gallons/min (gpm)', id: FlowFormats.FLOWGPM },
{ name: 'Cubic meters/sec (cms)', id: FlowFormats.FLOWCMS },
{ name: 'Cubic feet/sec (cfs)', id: FlowFormats.FLOWCFS },
{ name: 'Cubic feet/min (cfm)', id: FlowFormats.FLOWCFM },
{ name: 'Litre/hour', id: FlowFormats.LITREH },
{ name: 'Litre/min (L/min)', id: FlowFormats.FLOWLPM },
{ name: 'milliLitre/min (mL/min)', id: FlowFormats.FLOWMLPM },
{ name: 'Lux (lx)', id: FlowFormats.LUX },
],
},
{
name: CategoryNames.Force,
formats: [
{ name: 'Newton-meters (Nm)', id: ForceFormats.FORCENM },
{ name: 'Kilonewton-meters (kNm)', id: ForceFormats.FORCEKNM },
{ name: 'Newtons (N)', id: ForceFormats.FORCEN },
{ name: 'Kilonewtons (kN)', id: ForceFormats.FORCEKN },
],
},
{
name: CategoryNames.Mass,
formats: [
{ name: 'milligram (mg)', id: MassFormats.MASSMG },
{ name: 'gram (g)', id: MassFormats.MASSG },
{ name: 'pound (lb)', id: MassFormats.MASSLB },
{ name: 'kilogram (kg)', id: MassFormats.MASSKG },
{ name: 'metric ton (t)', id: MassFormats.MASST },
],
},
{
name: CategoryNames.Length,
formats: [
{ name: 'millimeter (mm)', id: LengthFormats.LENGTHMM },
{ name: 'inch (in)', id: LengthFormats.LENGTHIN },
{ name: 'feet (ft)', id: LengthFormats.LENGTHFT },
{ name: 'meter (m)', id: LengthFormats.LENGTHM },
{ name: 'kilometer (km)', id: LengthFormats.LENGTHKM },
{ name: 'mile (mi)', id: LengthFormats.LENGTHMI },
],
},
{
name: CategoryNames.Pressure,
formats: [
{ name: 'Millibars', id: PressureFormats.PRESSUREMBAR },
{ name: 'Bars', id: PressureFormats.PRESSUREBAR },
{ name: 'Kilobars', id: PressureFormats.PRESSUREKBAR },
{ name: 'Pascals', id: PressureFormats.PRESSUREPA },
{ name: 'Hectopascals', id: PressureFormats.PRESSUREHPA },
{ name: 'Kilopascals', id: PressureFormats.PRESSUREKPA },
{ name: 'Inches of mercury', id: PressureFormats.PRESSUREHG },
{ name: 'PSI', id: PressureFormats.PRESSUREPSI },
],
},
{
name: CategoryNames.Radiation,
formats: [
{ name: 'Becquerel (Bq)', id: RadiationFormats.RADBQ },
{ name: 'curie (Ci)', id: RadiationFormats.RADCI },
{ name: 'Gray (Gy)', id: RadiationFormats.RADGY },
{ name: 'rad', id: RadiationFormats.RADRAD },
{ name: 'Sievert (Sv)', id: RadiationFormats.RADSV },
{ name: 'milliSievert (mSv)', id: RadiationFormats.RADMSV },
{ name: 'microSievert (µSv)', id: RadiationFormats.RADUSV },
{ name: 'rem', id: RadiationFormats.RADREM },
{ name: 'Exposure (C/kg)', id: RadiationFormats.RADEXPCKG },
{ name: 'roentgen (R)', id: RadiationFormats.RADR },
{ name: 'Sievert/hour (Sv/h)', id: RadiationFormats.RADSVH },
{ name: 'milliSievert/hour (mSv/h)', id: RadiationFormats.RADMSVH },
{ name: 'microSievert/hour (µSv/h)', id: RadiationFormats.RADUSVH },
],
},
{
name: CategoryNames.RotationSpeed,
formats: [
{ name: 'Revolutions per minute (rpm)', id: RotationSpeedFormats.ROTRPM },
{ name: 'Hertz (Hz)', id: RotationSpeedFormats.ROTHZ },
{ name: 'Radians per second (rad/s)', id: RotationSpeedFormats.ROTRADS },
{ name: 'Degrees per second (°/s)', id: RotationSpeedFormats.ROTDEGS },
],
},
{
name: CategoryNames.Temperature,
formats: [
{ name: 'Celsius (°C)', id: TemperatureFormats.CELSIUS },
{ name: 'Fahrenheit (°F)', id: TemperatureFormats.FAHRENHEIT },
{ name: 'Kelvin (K)', id: TemperatureFormats.KELVIN },
],
},
{
name: CategoryNames.Velocity,
formats: [
{ name: 'meters/second (m/s)', id: VelocityFormats.METERS_PER_SECOND },
{ name: 'kilometers/hour (km/h)', id: VelocityFormats.KILOMETERS_PER_HOUR },
{ name: 'miles/hour (mph)', id: VelocityFormats.MILES_PER_HOUR },
{ name: 'knot (kn)', id: VelocityFormats.KNOT },
],
},
{
name: CategoryNames.Volume,
formats: [
{ name: 'millilitre (mL)', id: VolumeFormats.MILLILITRE },
{ name: 'litre (L)', id: VolumeFormats.LITRE },
{ name: 'cubic meter', id: VolumeFormats.CUBIC_METER },
{ name: 'Normal cubic meter', id: VolumeFormats.NORMAL_CUBIC_METER },
{ name: 'cubic decimeter', id: VolumeFormats.CUBIC_DECIMETER },
{ name: 'gallons', id: VolumeFormats.GALLONS },
],
},
{
name: CategoryNames.Boolean,
formats: [
{ name: 'True / False', id: BooleanFormats.TRUE_FALSE },
{ name: 'Yes / No', id: BooleanFormats.YES_NO },
{ name: 'On / Off', id: BooleanFormats.ON_OFF },
],
},
];
// Function to get the category name for a given unit ID (Grafana or universal)
export const getCategoryName = (unitId: string): YAxisCategoryNames | null => {
const categories = getYAxisCategories(YAxisSource.DASHBOARDS);

export const flattenedCategories = flattenDeep(
dataTypeCategories.map((category) => category.formats),
);
const foundCategory = categories.find((category) =>
category.units.some((unit) => {
// Units in Y-axis categories use universal unit IDs.
// Thresholds / column units often use Grafana-style IDs.
// Treat a unit as matching if either:
// - it is already the universal ID, or
// - it matches the mapped Grafana ID for that universal unit.
if (unit.id === unitId) {
return true;
}

type ConversionFactors = {
[key: string]: {
[key: string]: number | null;
};
const grafanaId = UniversalUnitToGrafanaUnit[unit.id];
return grafanaId === unitId;
}),
);

return foundCategory ? foundCategory.name : null;
};

// Object containing conversion factors for various categories and formats
const conversionFactors: ConversionFactors = {
[CategoryNames.Time]: {
[TimeFormats.Hertz]: 1,
[TimeFormats.Nanoseconds]: 1e-9,
[TimeFormats.Microseconds]: 1e-6,
[TimeFormats.Milliseconds]: 1e-3,
[TimeFormats.Seconds]: 1,
[TimeFormats.Minutes]: 60,
[TimeFormats.Hours]: 3600,
[TimeFormats.Days]: 86400,
[TimeFormats.DurationMs]: 1e-3,
[TimeFormats.DurationS]: 1,
[TimeFormats.DurationHms]: null, // Requires special handling
[TimeFormats.DurationDhms]: null, // Requires special handling
[TimeFormats.Timeticks]: null, // Requires special handling
[TimeFormats.ClockMs]: 1e-3,
[TimeFormats.ClockS]: 1,
},
[CategoryNames.Throughput]: {
[ThroughputFormats.CountsPerSec]: 1,
[ThroughputFormats.OpsPerSec]: 1,
[ThroughputFormats.RequestsPerSec]: 1,
[ThroughputFormats.ReadsPerSec]: 1,
[ThroughputFormats.WritesPerSec]: 1,
[ThroughputFormats.IOOpsPerSec]: 1,
[ThroughputFormats.CountsPerMin]: 1 / 60,
[ThroughputFormats.OpsPerMin]: 1 / 60,
[ThroughputFormats.ReadsPerMin]: 1 / 60,
[ThroughputFormats.WritesPerMin]: 1 / 60,
},
[CategoryNames.Data]: {
[DataFormats.BytesIEC]: 1,
[DataFormats.BytesSI]: 1,
[DataFormats.BitsIEC]: 0.125,
[DataFormats.BitsSI]: 0.125,
[DataFormats.KibiBytes]: 1024,
[DataFormats.KiloBytes]: 1000,
[DataFormats.MebiBytes]: 1048576,
[DataFormats.MegaBytes]: 1000000,
[DataFormats.GibiBytes]: 1073741824,
[DataFormats.GigaBytes]: 1000000000,
[DataFormats.TebiBytes]: 1099511627776,
[DataFormats.TeraBytes]: 1000000000000,
[DataFormats.PebiBytes]: 1125899906842624,
[DataFormats.PetaBytes]: 1000000000000000,
},
[CategoryNames.DataRate]: {
[DataRateFormats.PacketsPerSec]: null, // Cannot convert directly to other data rates
[DataRateFormats.BytesPerSecIEC]: 1,
[DataRateFormats.BytesPerSecSI]: 1,
[DataRateFormats.BitsPerSecIEC]: 0.125,
[DataRateFormats.BitsPerSecSI]: 0.125,
[DataRateFormats.KibiBytesPerSec]: 1024,
[DataRateFormats.KibiBitsPerSec]: 128,
[DataRateFormats.KiloBytesPerSec]: 1000,
[DataRateFormats.KiloBitsPerSec]: 125,
[DataRateFormats.MebiBytesPerSec]: 1048576,
[DataRateFormats.MebiBitsPerSec]: 131072,
[DataRateFormats.MegaBytesPerSec]: 1000000,
[DataRateFormats.MegaBitsPerSec]: 125000,
[DataRateFormats.GibiBytesPerSec]: 1073741824,
[DataRateFormats.GibiBitsPerSec]: 134217728,
[DataRateFormats.GigaBytesPerSec]: 1000000000,
[DataRateFormats.GigaBitsPerSec]: 125000000,
[DataRateFormats.TebiBytesPerSec]: 1099511627776,
[DataRateFormats.TebiBitsPerSec]: 137438953472,
[DataRateFormats.TeraBytesPerSec]: 1000000000000,
[DataRateFormats.TeraBitsPerSec]: 125000000000,
[DataRateFormats.PebiBytesPerSec]: 1125899906842624,
[DataRateFormats.PebiBitsPerSec]: 140737488355328,
[DataRateFormats.PetaBytesPerSec]: 1000000000000000,
[DataRateFormats.PetaBitsPerSec]: 125000000000000,
},
[CategoryNames.Miscellaneous]: {
[MiscellaneousFormats.None]: null,
[MiscellaneousFormats.String]: null,
[MiscellaneousFormats.Short]: null,
[MiscellaneousFormats.Percent]: 1,
[MiscellaneousFormats.PercentUnit]: 100,
[MiscellaneousFormats.Humidity]: 1,
[MiscellaneousFormats.Decibel]: null,
[MiscellaneousFormats.Hexadecimal0x]: null,
[MiscellaneousFormats.Hexadecimal]: null,
[MiscellaneousFormats.ScientificNotation]: null,
[MiscellaneousFormats.LocaleFormat]: null,
[MiscellaneousFormats.Pixels]: null,
},
[CategoryNames.Boolean]: {
[BooleanFormats.TRUE_FALSE]: null, // Not convertible
[BooleanFormats.YES_NO]: null, // Not convertible
[BooleanFormats.ON_OFF]: null, // Not convertible
},
};

// Function to get the conversion factor between two units in a specific category
function getConversionFactor(
fromUnit: string,
toUnit: string,
category: CategoryNames,
): number | null {
// Retrieves the conversion factors for the specified category
const categoryFactors = conversionFactors[category];
if (!categoryFactors) {
return null; // Returns null if the category does not exist
}
const fromFactor = categoryFactors[fromUnit];
const toFactor = categoryFactors[toUnit];
if (
fromFactor === undefined ||
toFactor === undefined ||
fromFactor === null ||
toFactor === null
) {
return null; // Returns null if either unit does not exist or is not convertible
}
return fromFactor / toFactor; // Returns the conversion factor ratio
}

// Function to convert a value from one unit to another
export function convertUnit(
value: number,
fromUnitId?: string,
toUnitId?: string,
): number | null {
let fromUnit: string | undefined;
let toUnit: string | undefined;

// Finds the category that contains the specified units and extracts fromUnit and toUnit using array methods
const category = dataTypeCategories.find((category) =>
category.formats.some((format) => {
if (format.id === fromUnitId) {
fromUnit = format.id;
}
if (format.id === toUnitId) {
toUnit = format.id;
}
return fromUnit && toUnit; // Break out early if both units are found
}),
);

if (!category || !fromUnit || !toUnit) {
if (!fromUnitId || !toUnitId) {
return null;
} // Return null if category or units are not found
}

// Gets the conversion factor for the specified units
const conversionFactor = getConversionFactor(
fromUnit,
toUnit,
category.name as any,
);
if (conversionFactor === null) {
const fromCategory = getCategoryName(fromUnitId);
const toCategory = getCategoryName(toUnitId);

// If either unit is unknown or the categories don't match, the conversion is invalid
if (!fromCategory || !toCategory || fromCategory !== toCategory) {
return null;
} // Return null if conversion is not possible
}

return value * conversionFactor;
// Delegate the actual numeric conversion (or identity) to the shared helper,
// which understands both Grafana-style and universal unit IDs.
return convertValue(value, fromUnitId, toUnitId);
}

// Function to get the category name for a given unit ID
export const getCategoryName = (unitId: string): CategoryNames | null => {
// Finds the category that contains the specified unit ID
const foundCategory = dataTypeCategories.find((category) =>
category.formats.some((format) => format.id === unitId),
);
return foundCategory ? (foundCategory.name as CategoryNames) : null;
};

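Reviewer note: a hedged usage sketch (not part of the PR) of the slimmed-down API. getCategoryName now resolves either a Grafana-style or a universal unit ID to its Y-axis category, and convertUnit rejects cross-category conversions before delegating the arithmetic to the shared convertValue helper. The 'ms', 's', and 'bytes' IDs below are assumptions for illustration; real IDs come from the unit constants.

import { convertUnit, getCategoryName } from './dataFormatCategories';

// Same category: the numeric conversion is delegated (1500 ms -> 1.5 s, if the IDs are valid).
const seconds = convertUnit(1500, 'ms', 's');

// Unknown or mismatched categories: null rather than a wrong number.
const invalid = convertUnit(10, 'ms', 'bytes');

console.log(getCategoryName('ms'), seconds, invalid);
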
@@ -2,6 +2,9 @@ import { Layout } from 'react-grid-layout';
import { DefaultOptionType } from 'antd/es/select';
import { omitIdFromQuery } from 'components/ExplorerCard/utils';
import { PrecisionOptionsEnum } from 'components/Graph/types';
import { YAxisCategoryNames } from 'components/YAxisUnitSelector/constants';
import { YAxisSource } from 'components/YAxisUnitSelector/types';
import { getYAxisCategories } from 'components/YAxisUnitSelector/utils';
import {
initialQueryBuilderFormValuesMap,
PANEL_TYPES,
@@ -21,11 +24,7 @@ import { IBuilderQuery, Query } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
import { DataSource } from 'types/common/queryBuilder';

import {
dataTypeCategories,
getCategoryName,
} from './RightContainer/dataFormatCategories';
import { CategoryNames } from './RightContainer/types';
import { getCategoryName } from './RightContainer/dataFormatCategories';

export const getIsQueryModified = (
currentQuery: Query,
@@ -606,14 +605,21 @@ export const PANEL_TYPE_TO_QUERY_TYPES: Record<PANEL_TYPES, EQueryType[]> = {
* the label and value for each format.
*/
export const getCategorySelectOptionByName = (
name?: CategoryNames | string,
): DefaultOptionType[] =>
dataTypeCategories
.find((category) => category.name === name)
?.formats.map((format) => ({
label: format.name,
value: format.id,
})) || [];
name?: YAxisCategoryNames,
): DefaultOptionType[] => {
const categories = getYAxisCategories(YAxisSource.DASHBOARDS);
if (!categories.length) {
return [];
}
return (
categories
.find((category) => category.name === name)
?.units.map((unit) => ({
label: unit.name,
value: unit.id,
})) || []
);
};

/**
* Generates unit options based on the provided column unit.

@@ -116,7 +116,7 @@ describe('SSOEnforcementToggle', () => {
render(
<SSOEnforcementToggle
isDefaultChecked={true}
record={{ ...mockGoogleAuthDomain, id: undefined }}
record={{ ...mockGoogleAuthDomain, id: '' }}
/>,
);

@@ -1,10 +1,13 @@
import { CategoryNames } from 'container/NewWidget/RightContainer/types';
import { YAxisCategoryNames } from 'components/YAxisUnitSelector/constants';

export const categoryToSupport = [
CategoryNames.Data,
CategoryNames.DataRate,
CategoryNames.Time,
CategoryNames.Throughput,
CategoryNames.Miscellaneous,
CategoryNames.Boolean,
export const categoryToSupport: YAxisCategoryNames[] = [
YAxisCategoryNames.None,
YAxisCategoryNames.Data,
YAxisCategoryNames.DataRate,
YAxisCategoryNames.Time,
YAxisCategoryNames.Count,
YAxisCategoryNames.Operations,
YAxisCategoryNames.Percentage,
YAxisCategoryNames.Miscellaneous,
YAxisCategoryNames.Boolean,
];

frontend/src/hooks/dashboard/useDashboardListSortOrder.ts (new file, 55 lines)
@@ -0,0 +1,55 @@
import { useState } from 'react';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import isEqual from 'lodash-es/isEqual';

export interface DashboardSortOrder {
  columnKey: string;
  order: string;
  pagination: string;
  search: string;
}

const SUPPORTED_COLUMN_KEYS = ['createdAt', 'updatedAt'];
const SUPPORTED_ORDER_KEYS = ['ascend', 'descend'];

export function useDashboardListSortOrder(): {
  listSortOrder: DashboardSortOrder;
  setListSortOrder: (sortOrder: DashboardSortOrder) => void;
} {
  const { safeNavigate } = useSafeNavigate();
  const params = useUrlQuery();

  const orderColumnParam = params.get('columnKey');
  const orderQueryParam = params.get('order');
  const paginationParam = params.get('page');
  const searchParam = params.get('search');

  const [listSortOrder, setListOrder] = useState<DashboardSortOrder>({
    columnKey:
      orderColumnParam && SUPPORTED_COLUMN_KEYS.includes(orderColumnParam)
        ? orderColumnParam
        : 'updatedAt',
    order:
      orderQueryParam && SUPPORTED_ORDER_KEYS.includes(orderQueryParam)
        ? orderQueryParam
        : 'descend',
    pagination: paginationParam || '1',
    search: searchParam || '',
  });

  function setListSortOrder(sortOrder: DashboardSortOrder): void {
    if (!isEqual(sortOrder, listSortOrder)) {
      setListOrder(sortOrder);
    }

    params.set('columnKey', sortOrder.columnKey);
    params.set('order', sortOrder.order);
    params.set('page', sortOrder.pagination || '1');
    params.set('search', sortOrder.search || '');

    safeNavigate({ search: params.toString() });
  }

  return { listSortOrder, setListSortOrder };
}
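Note: this new hook lifts the URL-synced list sort state out of DashboardProvider (the corresponding removal appears further down in this diff), so the columnKey/order/page/search params are only read from and written to the URL by the dashboards list page that actually mounts the hook.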
@@ -0,0 +1,94 @@
import { act, renderHook } from '@testing-library/react';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { useIsTextSelected } from 'hooks/useIsTextSelected';
import { ILog } from 'types/api/logs/log';

import useLogDetailHandlers from '../useLogDetailHandlers';

jest.mock('hooks/logs/useActiveLog');
jest.mock('hooks/useIsTextSelected');

const mockOnSetActiveLog = jest.fn();
const mockOnClearActiveLog = jest.fn();
const mockOnAddToQuery = jest.fn();
const mockOnGroupByAttribute = jest.fn();
const mockIsTextSelected = jest.fn();

const mockLog: ILog = {
  id: 'log-1',
  timestamp: '2024-01-01T00:00:00Z',
  date: '2024-01-01',
  body: 'test log body',
  severityText: 'INFO',
  severityNumber: 9,
  traceFlags: 0,
  traceId: '',
  spanID: '',
  attributesString: {},
  attributesInt: {},
  attributesFloat: {},
  resources_string: {},
  scope_string: {},
  attributes_string: {},
  severity_text: '',
  severity_number: 0,
};

beforeEach(() => {
  jest.clearAllMocks();

  jest.mocked(useIsTextSelected).mockReturnValue(mockIsTextSelected);

  jest.mocked(useActiveLog).mockReturnValue({
    activeLog: null,
    onSetActiveLog: mockOnSetActiveLog,
    onClearActiveLog: mockOnClearActiveLog,
    onAddToQuery: mockOnAddToQuery,
    onGroupByAttribute: mockOnGroupByAttribute,
  });
});

it('should not open log detail when text is selected', () => {
  mockIsTextSelected.mockReturnValue(true);

  const { result } = renderHook(() => useLogDetailHandlers());

  act(() => {
    result.current.handleSetActiveLog(mockLog);
  });

  expect(mockOnSetActiveLog).not.toHaveBeenCalled();
});

it('should open log detail when no text is selected', () => {
  mockIsTextSelected.mockReturnValue(false);

  const { result } = renderHook(() => useLogDetailHandlers());

  act(() => {
    result.current.handleSetActiveLog(mockLog);
  });

  expect(mockOnSetActiveLog).toHaveBeenCalledWith(mockLog);
});

it('should toggle off when clicking the same active log', () => {
  mockIsTextSelected.mockReturnValue(false);

  jest.mocked(useActiveLog).mockReturnValue({
    activeLog: mockLog,
    onSetActiveLog: mockOnSetActiveLog,
    onClearActiveLog: mockOnClearActiveLog,
    onAddToQuery: mockOnAddToQuery,
    onGroupByAttribute: mockOnGroupByAttribute,
  });

  const { result } = renderHook(() => useLogDetailHandlers());

  act(() => {
    result.current.handleSetActiveLog(mockLog);
  });

  expect(mockOnClearActiveLog).toHaveBeenCalled();
  expect(mockOnSetActiveLog).not.toHaveBeenCalled();
});

@@ -2,6 +2,7 @@ import { useCallback, useState } from 'react';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import type { UseActiveLog } from 'hooks/logs/types';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { useIsTextSelected } from 'hooks/useIsTextSelected';
import { ILog } from 'types/api/logs/log';

type SelectedTab = typeof VIEW_TYPES[keyof typeof VIEW_TYPES] | undefined;
@@ -28,9 +29,13 @@ function useLogDetailHandlers({
    onAddToQuery,
  } = useActiveLog();
  const [selectedTab, setSelectedTab] = useState<SelectedTab>(defaultTab);
  const isTextSelected = useIsTextSelected();

  const handleSetActiveLog = useCallback(
    (log: ILog, nextTab: SelectedTab = defaultTab): void => {
      if (isTextSelected()) {
        return;
      }
      if (activeLog?.id === log.id) {
        onClearActiveLog();
        setSelectedTab(undefined);
@@ -39,7 +44,7 @@ function useLogDetailHandlers({
      onSetActiveLog(log);
      setSelectedTab(nextTab ?? defaultTab);
    },
    [activeLog?.id, defaultTab, onClearActiveLog, onSetActiveLog],
    [activeLog?.id, defaultTab, onClearActiveLog, onSetActiveLog, isTextSelected],
  );

  const handleCloseLogDetail = useCallback((): void => {

frontend/src/hooks/useIsTextSelected.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
import { useCallback } from 'react';

export function useIsTextSelected(): () => boolean {
  return useCallback((): boolean => {
    const selection = window.getSelection();
    return (
      !!selection && !selection.isCollapsed && selection.toString().length > 0
    );
  }, []);
}
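Note: the hook returns a stable callback rather than a boolean so callers read the live window.getSelection() state at click time; a selection only counts when it is non-collapsed and non-empty, which is what lets users highlight log text without the detail drawer opening.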
@@ -22,9 +22,7 @@ import ROUTES from 'constants/routes';
import dayjs, { Dayjs } from 'dayjs';
import { useDashboardVariablesFromLocalStorage } from 'hooks/dashboard/useDashboardFromLocalStorage';
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useTabVisibility from 'hooks/useTabFocus';
import useUrlQuery from 'hooks/useUrlQuery';
import { getUpdatedLayout } from 'lib/dashboard/getUpdatedLayout';
import { getMinMaxForSelectedTime } from 'lib/getMinMax';
import { defaultTo, isEmpty } from 'lodash-es';
@@ -50,11 +48,7 @@ import {
  setDashboardVariablesStore,
  updateDashboardVariablesStore,
} from './store/dashboardVariables/dashboardVariablesStore';
import {
  DashboardSortOrder,
  IDashboardContext,
  WidgetColumnWidths,
} from './types';
import { IDashboardContext, WidgetColumnWidths } from './types';
import { sortLayout } from './util';

const DashboardContext = createContext<IDashboardContext>({
@@ -71,13 +65,7 @@ const DashboardContext = createContext<IDashboardContext>({
  layouts: [],
  panelMap: {},
  setPanelMap: () => {},
  listSortOrder: {
    columnKey: 'createdAt',
    order: 'descend',
    pagination: '1',
    search: '',
  },
  setListSortOrder: () => {},

  setLayouts: () => {},
  setSelectedDashboard: () => {},
  updatedTimeRef: {} as React.MutableRefObject<Dayjs | null>,
@@ -101,7 +89,6 @@ interface Props {
export function DashboardProvider({
  children,
}: PropsWithChildren): JSX.Element {
  const { safeNavigate } = useSafeNavigate();
  const [isDashboardSliderOpen, setIsDashboardSlider] = useState<boolean>(false);

  const [toScrollWidgetId, setToScrollWidgetId] = useState<string>('');
@@ -122,52 +109,8 @@ export function DashboardProvider({
    exact: true,
  });

  const isDashboardListPage = useRouteMatch<Props>({
    path: ROUTES.ALL_DASHBOARD,
    exact: true,
  });

  const { showErrorModal } = useErrorModal();

  // added extra checks here in case wrong values appear use the default values rather than empty dashboards
  const supportedOrderColumnKeys = ['createdAt', 'updatedAt'];

  const supportedOrderKeys = ['ascend', 'descend'];

  const params = useUrlQuery();
  // since the dashboard provider is wrapped at the very top of the application hence it initialises these values from other pages as well.
  // pick the below params from URL only if the user is on the dashboards list page.
  const orderColumnParam = isDashboardListPage && params.get('columnKey');
  const orderQueryParam = isDashboardListPage && params.get('order');
  const paginationParam = isDashboardListPage && params.get('page');
  const searchParam = isDashboardListPage && params.get('search');

  const [listSortOrder, setListOrder] = useState({
    columnKey: orderColumnParam
      ? supportedOrderColumnKeys.includes(orderColumnParam)
        ? orderColumnParam
        : 'updatedAt'
      : 'updatedAt',
    order: orderQueryParam
      ? supportedOrderKeys.includes(orderQueryParam)
        ? orderQueryParam
        : 'descend'
      : 'descend',
    pagination: paginationParam || '1',
    search: searchParam || '',
  });

  function setListSortOrder(sortOrder: DashboardSortOrder): void {
    if (!isEqual(sortOrder, listSortOrder)) {
      setListOrder(sortOrder);
    }
    params.set('columnKey', sortOrder.columnKey as string);
    params.set('order', sortOrder.order as string);
    params.set('page', sortOrder.pagination || '1');
    params.set('search', sortOrder.search || '');
    safeNavigate({ search: params.toString() });
  }

  const dispatch = useDispatch<Dispatch<AppActions>>();

  const globalTime = useSelector<AppState, GlobalReducer>(
@@ -502,8 +445,6 @@ export function DashboardProvider({
      selectedDashboard,
      dashboardId,
      layouts,
      listSortOrder,
      setListSortOrder,
      panelMap,
      setLayouts,
      setPanelMap,
@@ -527,8 +468,6 @@ export function DashboardProvider({
      selectedDashboard,
      dashboardId,
      layouts,
      listSortOrder,
      setListSortOrder,
      panelMap,
      toScrollWidgetId,
      updateLocalStorageDashboardVariables,

@@ -4,13 +4,6 @@ import dayjs from 'dayjs';
import { SuccessResponseV2 } from 'types/api';
import { Dashboard } from 'types/api/dashboard/getAll';

export interface DashboardSortOrder {
  columnKey: string;
  order: string;
  pagination: string;
  search: string;
}

export type WidgetColumnWidths = {
  [widgetId: string]: Record<string, number>;
};
@@ -26,8 +19,6 @@ export interface IDashboardContext {
  layouts: Layout[];
  panelMap: Record<string, { widgets: Layout[]; collapsed: boolean }>;
  setPanelMap: React.Dispatch<React.SetStateAction<Record<string, any>>>;
  listSortOrder: DashboardSortOrder;
  setListSortOrder: (sortOrder: DashboardSortOrder) => void;
  setLayouts: React.Dispatch<React.SetStateAction<Layout[]>>;
  setSelectedDashboard: React.Dispatch<
    React.SetStateAction<Dashboard | undefined>

@@ -26,5 +26,22 @@ func (provider *provider) addAuthzRoutes(router *mux.Router) error {
        return err
    }

    if err := router.Handle("/api/v1/authz/resources", handler.New(provider.authZ.OpenAccess(provider.authzHandler.GetResources), handler.OpenAPIDef{
        ID:                  "AuthzResources",
        Tags:                []string{"authz"},
        Summary:             "Get resources",
        Description:         "Gets all the available resources",
        Request:             nil,
        RequestContentType:  "",
        Response:            new(authtypes.GettableResources),
        ResponseContentType: "application/json",
        SuccessStatusCode:   http.StatusOK,
        ErrorStatusCodes:    []int{},
        Deprecated:          false,
        SecuritySchemes:     nil,
    })).Methods(http.MethodGet).GetError(); err != nil {
        return err
    }

    return nil
}

@@ -55,7 +55,7 @@ func (provider *provider) addGatewayRoutes(router *mux.Router) error {
        RequestContentType:  "application/json",
        Response:            new(gatewaytypes.GettableCreatedIngestionKey),
        ResponseContentType: "application/json",
        SuccessStatusCode:   http.StatusOK,
        SuccessStatusCode:   http.StatusCreated,
        ErrorStatusCodes:    []int{},
        Deprecated:          false,
        SecuritySchemes:     newSecuritySchemes(types.RoleAdmin),

@@ -81,7 +81,7 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
        Response:            new(metricsexplorertypes.MetricAttributesResponse),
        ResponseContentType: "application/json",
        SuccessStatusCode:   http.StatusOK,
        ErrorStatusCodes:    []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusInternalServerError},
        ErrorStatusCodes:    []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusNotFound, http.StatusInternalServerError},
        Deprecated:          false,
        SecuritySchemes:     newSecuritySchemes(types.RoleViewer),
    })).Methods(http.MethodGet).GetError(); err != nil {
@@ -138,7 +138,7 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
        Response:            new(metricsexplorertypes.MetricHighlightsResponse),
        ResponseContentType: "application/json",
        SuccessStatusCode:   http.StatusOK,
        ErrorStatusCodes:    []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusInternalServerError},
        ErrorStatusCodes:    []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusNotFound, http.StatusInternalServerError},
        Deprecated:          false,
        SecuritySchemes:     newSecuritySchemes(types.RoleViewer),
    })).Methods(http.MethodGet).GetError(); err != nil {
@@ -157,7 +157,7 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
        Response:            new(metricsexplorertypes.MetricAlertsResponse),
        ResponseContentType: "application/json",
        SuccessStatusCode:   http.StatusOK,
        ErrorStatusCodes:    []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusInternalServerError},
        ErrorStatusCodes:    []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusNotFound, http.StatusInternalServerError},
        Deprecated:          false,
        SecuritySchemes:     newSecuritySchemes(types.RoleViewer),
    })).Methods(http.MethodGet).GetError(); err != nil {
@@ -176,7 +176,7 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
        Response:            new(metricsexplorertypes.MetricDashboardsResponse),
        ResponseContentType: "application/json",
        SuccessStatusCode:   http.StatusOK,
        ErrorStatusCodes:    []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusInternalServerError},
        ErrorStatusCodes:    []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusNotFound, http.StatusInternalServerError},
        Deprecated:          false,
        SecuritySchemes:     newSecuritySchemes(types.RoleViewer),
    })).Methods(http.MethodGet).GetError(); err != nil {

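Note: the StatusNotFound additions here correspond to the checkMetricExists guard introduced in the metrics explorer handler later in this diff, and the StatusCreated change matches the ingestion key handler now rendering 201 on create.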
@@ -45,23 +45,6 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
        return err
    }

    if err := router.Handle("/api/v1/roles/resources", handler.New(provider.authZ.AdminAccess(provider.authzHandler.GetResources), handler.OpenAPIDef{
        ID:                  "GetResources",
        Tags:                []string{"role"},
        Summary:             "Get resources",
        Description:         "Gets all the available resources for role assignment",
        Request:             nil,
        RequestContentType:  "",
        Response:            new(roletypes.GettableResources),
        ResponseContentType: "application/json",
        SuccessStatusCode:   http.StatusOK,
        ErrorStatusCodes:    []int{},
        Deprecated:          false,
        SecuritySchemes:     newSecuritySchemes(types.RoleAdmin),
    })).Methods(http.MethodGet).GetError(); err != nil {
        return err
    }

    if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Get), handler.OpenAPIDef{
        ID:   "GetRole",
        Tags: []string{"role"},
@@ -86,7 +69,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
        Description:         "Gets all objects connected to the specified role via a given relation type",
        Request:             nil,
        RequestContentType:  "",
        Response:            make([]*authtypes.Object, 0),
        Response:            make([]*authtypes.GettableObjects, 0),
        ResponseContentType: "application/json",
        SuccessStatusCode:   http.StatusOK,
        ErrorStatusCodes:    []int{http.StatusNotFound, http.StatusNotImplemented, http.StatusUnavailableForLegalReasons},
@@ -118,7 +101,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
        Tags:                []string{"role"},
        Summary:             "Patch objects for a role by relation",
        Description:         "Patches the objects connected to the specified role via a given relation type",
        Request:             new(roletypes.PatchableObjects),
        Request:             new(authtypes.PatchableObjects),
        RequestContentType:  "",
        Response:            nil,
        ResponseContentType: "application/json",

@@ -190,7 +190,7 @@ func (provider *provider) GetOrCreate(_ context.Context, _ valuer.UUID, _ *rolet
}

func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource {
    return nil
    return []*authtypes.Resource{}
}

func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {

@@ -110,13 +110,13 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
        return
    }

    render.Success(rw, http.StatusOK, objects)
    render.Success(rw, http.StatusOK, authtypes.NewGettableObjects(objects))
}

func (handler *handler) GetResources(rw http.ResponseWriter, r *http.Request) {
    resources := handler.authz.GetResources(r.Context())

    render.Success(rw, http.StatusOK, roletypes.NewGettableResources(resources))
    render.Success(rw, http.StatusOK, authtypes.NewGettableResources(resources))
}

func (handler *handler) List(rw http.ResponseWriter, r *http.Request) {
@@ -197,25 +197,30 @@ func (handler *handler) PatchObjects(rw http.ResponseWriter, r *http.Request) {
        return
    }

    req := new(roletypes.PatchableObjects)
    if err := binding.JSON.BindBody(r.Body, req); err != nil {
        render.Error(rw, err)
        return
    }

    role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
    if err != nil {
        render.Error(rw, err)
        return
    }

    patchableObjects, err := role.NewPatchableObjects(req.Additions, req.Deletions, relation)
    if err := role.ErrIfManaged(); err != nil {
        render.Error(rw, err)
        return
    }

    req := new(authtypes.PatchableObjects)
    if err := binding.JSON.BindBody(r.Body, req); err != nil {
        render.Error(rw, err)
        return
    }

    additions, deletions, err := authtypes.NewPatchableObjects(req.Additions, req.Deletions, relation)
    if err != nil {
        render.Error(rw, err)
        return
    }

    err = handler.authz.PatchObjects(ctx, valuer.MustNewUUID(claims.OrgID), role.Name, relation, patchableObjects.Additions, patchableObjects.Deletions)
    err = handler.authz.PatchObjects(ctx, valuer.MustNewUUID(claims.OrgID), role.Name, relation, additions, deletions)
    if err != nil {
        render.Error(rw, err)
        return

@@ -3,6 +3,7 @@ package configflagger
import (
    "context"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/flagger"
    "github.com/SigNoz/signoz/pkg/types/featuretypes"
@@ -32,6 +33,10 @@ func New(ctx context.Context, ps factory.ProviderSettings, c flagger.Config, reg
    for name, value := range c.Config.Boolean {
        feature, _, err := registry.GetByString(name)
        if err != nil {
            if errors.Ast(err, errors.TypeNotFound) {
                settings.Logger().WarnContext(ctx, "skipping unknown feature flag", "name", name, "kind", "boolean")
                continue
            }
            return nil, err
        }

@@ -46,6 +51,10 @@ func New(ctx context.Context, ps factory.ProviderSettings, c flagger.Config, reg
    for name, value := range c.Config.String {
        feature, _, err := registry.GetByString(name)
        if err != nil {
            if errors.Ast(err, errors.TypeNotFound) {
                settings.Logger().WarnContext(ctx, "skipping unknown feature flag", "name", name, "kind", "string")
                continue
            }
            return nil, err
        }

@@ -60,6 +69,10 @@ func New(ctx context.Context, ps factory.ProviderSettings, c flagger.Config, reg
    for name, value := range c.Config.Float {
        feature, _, err := registry.GetByString(name)
        if err != nil {
            if errors.Ast(err, errors.TypeNotFound) {
                settings.Logger().WarnContext(ctx, "skipping unknown feature flag", "name", name, "kind", "float")
                continue
            }
            return nil, err
        }

@@ -74,6 +87,10 @@ func New(ctx context.Context, ps factory.ProviderSettings, c flagger.Config, reg
    for name, value := range c.Config.Integer {
        feature, _, err := registry.GetByString(name)
        if err != nil {
            if errors.Ast(err, errors.TypeNotFound) {
                settings.Logger().WarnContext(ctx, "skipping unknown feature flag", "name", name, "kind", "integer")
                continue
            }
            return nil, err
        }

@@ -88,6 +105,10 @@ func New(ctx context.Context, ps factory.ProviderSettings, c flagger.Config, reg
    for name, value := range c.Config.Object {
        feature, _, err := registry.GetByString(name)
        if err != nil {
            if errors.Ast(err, errors.TypeNotFound) {
                settings.Logger().WarnContext(ctx, "skipping unknown feature flag", "name", name, "kind", "object")
                continue
            }
            return nil, err
        }

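Note: the same change is repeated across all five kind loops: an unknown flag name in the config now logs a warning and is skipped instead of aborting provider construction. A minimal sketch of the decision, using the repository's errors.Ast classifier; skipUnknown is a hypothetical helper, not part of the diff:

    // skipUnknown reports whether a registry lookup error should be ignored.
    // Not-found errors mean an unknown flag name: warn and continue.
    // Any other error still fails provider construction.
    func skipUnknown(err error) (skip bool, fatal error) {
        if err == nil {
            return false, nil
        }
        if errors.Ast(err, errors.TypeNotFound) {
            return true, nil
        }
        return false, err
    }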
@@ -5,6 +5,7 @@ import "github.com/SigNoz/signoz/pkg/types/featuretypes"
var (
    FeatureUseSpanMetrics = featuretypes.MustNewName("use_span_metrics")
    FeatureKafkaSpanEval  = featuretypes.MustNewName("kafka_span_eval")
    FeatureHideRootUser   = featuretypes.MustNewName("hide_root_user")
)

func MustNewRegistry() featuretypes.Registry {
@@ -25,6 +26,14 @@ func MustNewRegistry() featuretypes.Registry {
            DefaultVariant: featuretypes.MustNewName("disabled"),
            Variants:       featuretypes.NewBooleanVariants(),
        },
        &featuretypes.Feature{
            Name:           FeatureHideRootUser,
            Kind:           featuretypes.KindBoolean,
            Stage:          featuretypes.StageStable,
            Description:    "Controls whether root admin user is hidden or not",
            DefaultVariant: featuretypes.MustNewName("disabled"),
            Variants:       featuretypes.NewBooleanVariants(),
        },
    )
    if err != nil {
        panic(err)
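Note: FeatureHideRootUser defaults to the disabled variant, so existing deployments keep listing root users unless the hide_root_user boolean flag is explicitly enabled in the flagger configuration.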
@@ -122,7 +122,7 @@ func (handler *handler) CreateIngestionKey(rw http.ResponseWriter, r *http.Reque
        return
    }

    render.Success(rw, http.StatusOK, response)
    render.Success(rw, http.StatusCreated, response)
}

func (handler *handler) UpdateIngestionKey(rw http.ResponseWriter, r *http.Request) {

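Note: rendering 201 here keeps the handler in sync with the SuccessStatusCode: http.StatusCreated declared for this route in the gateway OpenAPI definition above.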
@@ -1,6 +1,7 @@
package implmetricsexplorer

import (
    "context"
    "net/http"

    "github.com/SigNoz/signoz/pkg/errors"
@@ -187,6 +188,12 @@ func (h *handler) GetMetricAlerts(rw http.ResponseWriter, req *http.Request) {
    }

    orgID := valuer.MustNewUUID(claims.OrgID)

    if err := h.checkMetricExists(req.Context(), orgID, metricName); err != nil {
        render.Error(rw, err)
        return
    }

    out, err := h.module.GetMetricAlerts(req.Context(), orgID, metricName)
    if err != nil {
        render.Error(rw, err)
@@ -209,6 +216,12 @@ func (h *handler) GetMetricDashboards(rw http.ResponseWriter, req *http.Request)
    }

    orgID := valuer.MustNewUUID(claims.OrgID)

    if err := h.checkMetricExists(req.Context(), orgID, metricName); err != nil {
        render.Error(rw, err)
        return
    }

    out, err := h.module.GetMetricDashboards(req.Context(), orgID, metricName)
    if err != nil {
        render.Error(rw, err)
@@ -231,6 +244,12 @@ func (h *handler) GetMetricHighlights(rw http.ResponseWriter, req *http.Request)
    }

    orgID := valuer.MustNewUUID(claims.OrgID)

    if err := h.checkMetricExists(req.Context(), orgID, metricName); err != nil {
        render.Error(rw, err)
        return
    }

    highlights, err := h.module.GetMetricHighlights(req.Context(), orgID, metricName)
    if err != nil {
        render.Error(rw, err)
@@ -266,6 +285,12 @@ func (h *handler) GetMetricAttributes(rw http.ResponseWriter, req *http.Request)
    }

    orgID := valuer.MustNewUUID(claims.OrgID)

    if err := h.checkMetricExists(req.Context(), orgID, metricName); err != nil {
        render.Error(rw, err)
        return
    }

    out, err := h.module.GetMetricAttributes(req.Context(), orgID, &in)
    if err != nil {
        render.Error(rw, err)
@@ -274,3 +299,14 @@ func (h *handler) GetMetricAttributes(rw http.ResponseWriter, req *http.Request)

    render.Success(rw, http.StatusOK, out)
}

func (h *handler) checkMetricExists(ctx context.Context, orgID valuer.UUID, metricName string) error {
    exists, err := h.module.CheckMetricExists(ctx, orgID, metricName)
    if err != nil {
        return err
    }
    if !exists {
        return errors.NewNotFoundf(errors.CodeNotFound, "metric not found: %q", metricName)
    }
    return nil
}

@@ -404,6 +404,26 @@ func (m *module) GetMetricAttributes(ctx context.Context, orgID valuer.UUID, req
    }, nil
}

func (m *module) CheckMetricExists(ctx context.Context, orgID valuer.UUID, metricName string) (bool, error) {
    sb := sqlbuilder.NewSelectBuilder()
    sb.Select("count(*) > 0 as metricExists")
    sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.AttributesMetadataTableName))
    sb.Where(sb.E("metric_name", metricName))

    query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)

    db := m.telemetryStore.ClickhouseDB()
    var exists bool
    valueCtx := ctxtypes.SetClickhouseMaxThreads(ctx, m.config.TelemetryStore.Threads)

    err := db.QueryRow(valueCtx, query, args...).Scan(&exists)
    if err != nil {
        return false, errors.WrapInternalf(err, errors.CodeInternal, "failed to check if metric exists")
    }

    return exists, nil
}

func (m *module) fetchMetadataFromCache(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, []string) {
    hits := make(map[string]*metricsexplorertypes.MetricMetadata)
    misses := make([]string, 0)

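Note: with the ClickHouse flavor the builder emits a single parameterized statement; a sketch of its shape, with the database and table names (package constants in the real code) replaced by placeholders and a hypothetical metric name:

    sb := sqlbuilder.NewSelectBuilder()
    sb.Select("count(*) > 0 as metricExists")
    sb.From("signoz_metrics.metadata") // placeholder for DBName.AttributesMetadataTableName
    sb.Where(sb.E("metric_name", "http_server_duration")) // hypothetical metric name
    query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
    // query: SELECT count(*) > 0 as metricExists FROM signoz_metrics.metadata WHERE metric_name = ?
    // args:  [http_server_duration]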
@@ -23,6 +23,7 @@ type Handler interface {

// Module represents the metrics module interface.
type Module interface {
    CheckMetricExists(ctx context.Context, orgID valuer.UUID, metricName string) (bool, error)
    ListMetrics(ctx context.Context, orgID valuer.UUID, params *metricsexplorertypes.ListMetricsParams) (*metricsexplorertypes.ListMetricsResponse, error)
    GetStats(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.StatsRequest) (*metricsexplorertypes.StatsResponse, error)
    GetTreemap(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.TreemapRequest) (*metricsexplorertypes.TreemapResponse, error)

@@ -120,6 +120,8 @@ func FilterResponse(results []*qbtypes.QueryRangeResponse) []*qbtypes.QueryRange
            }
        }
        resultData.Rows = filteredRows
    case *qbtypes.ScalarData:
        resultData.Data = filterScalarDataIPs(resultData.Columns, resultData.Data)
    }

    filteredData = append(filteredData, result)
@@ -145,6 +147,39 @@ func shouldIncludeSeries(series *qbtypes.TimeSeries) bool {
    return true
}

func filterScalarDataIPs(columns []*qbtypes.ColumnDescriptor, data [][]any) [][]any {
    // Find column indices for server address fields
    serverColIndices := make([]int, 0)
    for i, col := range columns {
        if col.Name == derivedKeyHTTPHost {
            serverColIndices = append(serverColIndices, i)
        }
    }

    if len(serverColIndices) == 0 {
        return data
    }

    filtered := make([][]any, 0, len(data))
    for _, row := range data {
        includeRow := true
        for _, colIdx := range serverColIndices {
            if colIdx < len(row) {
                if strVal, ok := row[colIdx].(string); ok {
                    if net.ParseIP(strVal) != nil {
                        includeRow = false
                        break
                    }
                }
            }
        }
        if includeRow {
            filtered = append(filtered, row)
        }
    }
    return filtered
}

func shouldIncludeRow(row *qbtypes.RawRow) bool {
    if row.Data != nil {
        if domainVal, ok := row.Data[derivedKeyHTTPHost]; ok {

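Note: net.ParseIP does all the classification here: it returns a non-nil value for IPv4 and IPv6 literals and nil for hostnames, so rows whose server-address column holds a bare IP are dropped while domain names pass through. A small standalone sketch:

    package main

    import (
        "fmt"
        "net"
    )

    func main() {
        for _, host := range []string{"192.168.1.1", "::1", "example.com"} {
            fmt.Println(host, "is an IP literal:", net.ParseIP(host) != nil)
        }
        // Output:
        // 192.168.1.1 is an IP literal: true
        // ::1 is an IP literal: true
        // example.com is an IP literal: false
    }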
@@ -117,6 +117,59 @@ func TestFilterResponse(t *testing.T) {
                },
            },
        },
        {
            name: "should filter out IP addresses from scalar data",
            input: []*qbtypes.QueryRangeResponse{
                {
                    Data: qbtypes.QueryData{
                        Results: []any{
                            &qbtypes.ScalarData{
                                QueryName: "endpoints",
                                Columns: []*qbtypes.ColumnDescriptor{
                                    {
                                        TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
                                        Type:              qbtypes.ColumnTypeGroup,
                                    },
                                    {
                                        TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "endpoints"},
                                        Type:              qbtypes.ColumnTypeAggregation,
                                    },
                                },
                                Data: [][]any{
                                    {"192.168.1.1", 10},
                                    {"example.com", 20},
                                    {"10.0.0.1", 5},
                                },
                            },
                        },
                    },
                },
            },
            expected: []*qbtypes.QueryRangeResponse{
                {
                    Data: qbtypes.QueryData{
                        Results: []any{
                            &qbtypes.ScalarData{
                                QueryName: "endpoints",
                                Columns: []*qbtypes.ColumnDescriptor{
                                    {
                                        TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
                                        Type:              qbtypes.ColumnTypeGroup,
                                    },
                                    {
                                        TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "endpoints"},
                                        Type:              qbtypes.ColumnTypeAggregation,
                                    },
                                },
                                Data: [][]any{
                                    {"example.com", 20},
                                },
                            },
                        },
                    },
                },
            },
        },
    }

    for _, tt := range tests {

@@ -2,18 +2,22 @@ package impluser

import (
    "context"
    "slices"

    "github.com/SigNoz/signoz/pkg/flagger"
    "github.com/SigNoz/signoz/pkg/modules/user"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/types/featuretypes"
    "github.com/SigNoz/signoz/pkg/valuer"
)

type getter struct {
    store types.UserStore
    store   types.UserStore
    flagger flagger.Flagger
}

func NewGetter(store types.UserStore) user.Getter {
    return &getter{store: store}
func NewGetter(store types.UserStore, flagger flagger.Flagger) user.Getter {
    return &getter{store: store, flagger: flagger}
}

func (module *getter) GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*types.User, error) {
@@ -26,6 +30,14 @@ func (module *getter) ListByOrgID(ctx context.Context, orgID valuer.UUID) ([]*ty
        return nil, err
    }

    // filter root users when the `hide_root_user` feature flag is enabled
    evalCtx := featuretypes.NewFlaggerEvaluationContext(orgID)
    hideRootUsers := module.flagger.BooleanOrEmpty(ctx, flagger.FeatureHideRootUser, evalCtx)

    if hideRootUsers {
        users = slices.DeleteFunc(users, func(user *types.User) bool { return user.IsRoot })
    }

    return users, nil
}

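Note: slices.DeleteFunc (standard library, Go 1.21+) removes matching elements in place and returns the shortened slice, which is why the result is reassigned to users. A minimal standalone sketch of the same gate, with a trimmed-down stand-in for types.User:

    // fakeUser is a stand-in for types.User; only the field used by the gate.
    type fakeUser struct {
        IsRoot bool
    }

    func hideRootUsers(users []*fakeUser, hide bool) []*fakeUser {
        if !hide {
            return users
        }
        // DeleteFunc keeps the non-matching prefix and zeroes the removed tail.
        return slices.DeleteFunc(users, func(u *fakeUser) bool { return u.IsRoot })
    }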
@@ -11,8 +11,11 @@ import (
    "github.com/SigNoz/signoz/pkg/alertmanager/signozalertmanager"
    "github.com/SigNoz/signoz/pkg/emailing/emailingtest"
    "github.com/SigNoz/signoz/pkg/factory/factorytest"
    "github.com/SigNoz/signoz/pkg/flagger"
    "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
    "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
    "github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
    "github.com/SigNoz/signoz/pkg/modules/user/impluser"
    "github.com/SigNoz/signoz/pkg/querier"
    "github.com/SigNoz/signoz/pkg/queryparser"
    "github.com/SigNoz/signoz/pkg/sharder"
@@ -41,7 +44,13 @@ func TestNewHandlers(t *testing.T) {
    queryParser := queryparser.New(providerSettings)
    require.NoError(t, err)
    dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
    modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)

    flagger, err := flagger.New(context.Background(), instrumentationtest.New().ToProviderSettings(), flagger.Config{}, flagger.MustNewRegistry())
    require.NoError(t, err)

    userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings), flagger)

    modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, userGetter)

    querierHandler := querier.NewHandler(providerSettings, nil, nil)
    handlers := NewHandlers(modules, providerSettings, nil, querierHandler, nil, nil, nil, nil, nil, nil, nil)

@@ -85,11 +85,11 @@ func NewModules(
    queryParser queryparser.QueryParser,
    config Config,
    dashboard dashboard.Module,
    userGetter user.Getter,
) Modules {
    quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore))
    orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
    user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, authz, analytics, config.User)
    userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
    ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)

    return Modules{

@@ -11,8 +11,11 @@ import (
    "github.com/SigNoz/signoz/pkg/alertmanager/signozalertmanager"
    "github.com/SigNoz/signoz/pkg/emailing/emailingtest"
    "github.com/SigNoz/signoz/pkg/factory/factorytest"
    "github.com/SigNoz/signoz/pkg/flagger"
    "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
    "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
    "github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
    "github.com/SigNoz/signoz/pkg/modules/user/impluser"
    "github.com/SigNoz/signoz/pkg/queryparser"
    "github.com/SigNoz/signoz/pkg/sharder"
    "github.com/SigNoz/signoz/pkg/sharder/noopsharder"
@@ -40,7 +43,13 @@ func TestNewModules(t *testing.T) {
    queryParser := queryparser.New(providerSettings)
    require.NoError(t, err)
    dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
    modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)

    flagger, err := flagger.New(context.Background(), instrumentationtest.New().ToProviderSettings(), flagger.Config{}, flagger.MustNewRegistry())
    require.NoError(t, err)

    userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings), flagger)

    modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, userGetter)

    reflectVal := reflect.ValueOf(modules)
    for i := 0; i < reflectVal.NumField(); i++ {

@@ -169,6 +169,7 @@ func NewSQLMigrationProviderFactories(
        sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
        sqlmigration.NewAddRootUserFactory(sqlstore, sqlschema),
        sqlmigration.NewAddUserEmailOrgIDIndexFactory(sqlstore, sqlschema),
        sqlmigration.NewMigrateRulesV4ToV5Factory(sqlstore, telemetryStore),
    )
}

@@ -1,11 +1,13 @@
package signoz

import (
    "context"
    "testing"

    "github.com/DATA-DOG/go-sqlmock"
    "github.com/SigNoz/signoz/pkg/alertmanager/nfmanager/nfmanagertest"
    "github.com/SigNoz/signoz/pkg/analytics"
    "github.com/SigNoz/signoz/pkg/flagger"
    "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
    "github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
    "github.com/SigNoz/signoz/pkg/modules/user/impluser"
@@ -75,7 +77,12 @@ func TestNewProviderFactories(t *testing.T) {
    })

    assert.NotPanics(t, func() {
        userGetter := impluser.NewGetter(impluser.NewStore(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual), instrumentationtest.New().ToProviderSettings()))
        flagger, err := flagger.New(context.Background(), instrumentationtest.New().ToProviderSettings(), flagger.Config{}, flagger.MustNewRegistry())
        if err != nil {
            panic(err)
        }

        userGetter := impluser.NewGetter(impluser.NewStore(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual), instrumentationtest.New().ToProviderSettings()), flagger)
        orgGetter := implorganization.NewGetter(implorganization.NewStore(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual)), nil)
        telemetryStore := telemetrystoretest.New(telemetrystore.Config{Provider: "clickhouse"}, sqlmock.QueryMatcherEqual)
        NewStatsReporterProviderFactories(telemetryStore, []statsreporter.StatsCollector{}, orgGetter, userGetter, tokenizertest.NewMockTokenizer(t), version.Build{}, analytics.Config{Enabled: true})

@@ -280,7 +280,7 @@ func New(
    }

    // Initialize user getter
    userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
    userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings), flagger)

    licensingProviderFactory := licenseProviderFactory(sqlstore, zeus, orgGetter, analytics)
    licensing, err := licensingProviderFactory.New(
@@ -388,7 +388,7 @@ func New(
    }

    // Initialize all modules
    modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard)
    modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard, userGetter)

    userService := impluser.NewService(providerSettings, impluser.NewStore(sqlstore, providerSettings), modules.User, orgGetter, authz, config.User.Root)

pkg/sqlmigration/066_migrate_rules_v4_to_v5_post_deprecation.go (new file, 209 lines)
@@ -0,0 +1,209 @@
package sqlmigration

import (
    "context"
    "database/sql"
    "encoding/json"
    "log/slog"

    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/telemetrystore"
    "github.com/SigNoz/signoz/pkg/transition"
    "github.com/uptrace/bun"
    "github.com/uptrace/bun/migrate"
)

type migrateRulesV4ToV5 struct {
    store          sqlstore.SQLStore
    telemetryStore telemetrystore.TelemetryStore
    logger         *slog.Logger
}

func NewMigrateRulesV4ToV5Factory(
    store sqlstore.SQLStore,
    telemetryStore telemetrystore.TelemetryStore,
) factory.ProviderFactory[SQLMigration, Config] {
    return factory.NewProviderFactory(
        factory.MustNewName("migrate_rules_post_deprecation"),
        func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
            return &migrateRulesV4ToV5{
                store:          store,
                telemetryStore: telemetryStore,
                logger:         ps.Logger,
            }, nil
        })
}

func (migration *migrateRulesV4ToV5) Register(migrations *migrate.Migrations) error {
    if err := migrations.Register(migration.Up, migration.Down); err != nil {
        return err
    }
    return nil
}

func (migration *migrateRulesV4ToV5) getLogDuplicateKeys(ctx context.Context) ([]string, error) {
    query := `
        SELECT name
        FROM (
            SELECT DISTINCT name FROM signoz_logs.distributed_logs_attribute_keys
            INTERSECT
            SELECT DISTINCT name FROM signoz_logs.distributed_logs_resource_keys
        )
        ORDER BY name
    `

    rows, err := migration.telemetryStore.ClickhouseDB().Query(ctx, query)
    if err != nil {
        migration.logger.WarnContext(ctx, "failed to query log duplicate keys", "error", err)
        return nil, nil
    }
    defer rows.Close()

    var keys []string
    for rows.Next() {
        var key string
        if err := rows.Scan(&key); err != nil {
            migration.logger.WarnContext(ctx, "failed to scan log duplicate key", "error", err)
            continue
        }
        keys = append(keys, key)
    }

    return keys, nil
}

func (migration *migrateRulesV4ToV5) getTraceDuplicateKeys(ctx context.Context) ([]string, error) {
    query := `
        SELECT tagKey
        FROM signoz_traces.distributed_span_attributes_keys
        WHERE tagType IN ('tag', 'resource')
        GROUP BY tagKey
        HAVING COUNT(DISTINCT tagType) > 1
        ORDER BY tagKey
    `

    rows, err := migration.telemetryStore.ClickhouseDB().Query(ctx, query)
    if err != nil {
        migration.logger.WarnContext(ctx, "failed to query trace duplicate keys", "error", err)
        return nil, nil
    }
    defer rows.Close()

    var keys []string
    for rows.Next() {
        var key string
        if err := rows.Scan(&key); err != nil {
            migration.logger.WarnContext(ctx, "failed to scan trace duplicate key", "error", err)
            continue
        }
        keys = append(keys, key)
    }

    return keys, nil
}

func (migration *migrateRulesV4ToV5) Up(ctx context.Context, db *bun.DB) error {
    logsKeys, err := migration.getLogDuplicateKeys(ctx)
    if err != nil {
        return err
    }

    tracesKeys, err := migration.getTraceDuplicateKeys(ctx)
    if err != nil {
        return err
    }

    tx, err := db.BeginTx(ctx, nil)
    if err != nil {
        return err
    }

    defer func() {
        _ = tx.Rollback()
    }()

    var rules []struct {
        ID   string         `bun:"id"`
        Data map[string]any `bun:"data"`
    }

    err = tx.NewSelect().
        Table("rule").
        Column("id", "data").
        Scan(ctx, &rules)
    if err != nil {
        if err == sql.ErrNoRows {
            return nil
        }
        return err
    }

    alertsMigrator := transition.NewAlertMigrateV5(migration.logger, logsKeys, tracesKeys)

    count := 0

    for _, rule := range rules {
        version, _ := rule.Data["version"].(string)

        if version == "v5" {
            continue
        }

        if version == "" {
            migration.logger.WarnContext(ctx, "unexpected empty version for rule", "rule_id", rule.ID)
        }

        migration.logger.InfoContext(ctx, "migrating rule v4 to v5", "rule_id", rule.ID, "current_version", version)

        // Check if the queries envelope already exists and is non-empty
        hasQueriesEnvelope := false
        if condition, ok := rule.Data["condition"].(map[string]any); ok {
            if compositeQuery, ok := condition["compositeQuery"].(map[string]any); ok {
                if queries, ok := compositeQuery["queries"].([]any); ok && len(queries) > 0 {
                    hasQueriesEnvelope = true
                }
            }
        }

        if hasQueriesEnvelope {
            // already has a queries envelope, so just bump the version;
            // this happens when a v5-shaped rule was saved under the wrong version
migration.logger.InfoContext(ctx, "rule already has queries envelope, bumping version", "rule_id", rule.ID)
|
||||
rule.Data["version"] = "v5"
|
||||
} else {
|
||||
// old format, run full migration
|
||||
migration.logger.InfoContext(ctx, "rule has old format, running full migration", "rule_id", rule.ID)
|
||||
updated := alertsMigrator.Migrate(ctx, rule.Data)
|
||||
if !updated {
|
||||
migration.logger.WarnContext(ctx, "expected updated to be true but got false", "rule_id", rule.ID)
|
||||
continue
|
||||
}
|
||||
rule.Data["version"] = "v5"
|
||||
}
|
||||
|
||||
dataJSON, err := json.Marshal(rule.Data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = tx.NewUpdate().
|
||||
Table("rule").
|
||||
Set("data = ?", string(dataJSON)).
|
||||
Where("id = ?", rule.ID).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
count++
|
||||
}
|
||||
if count != 0 {
|
||||
migration.logger.InfoContext(ctx, "migrate v4 alerts", "count", count)
|
||||
}
|
||||
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
func (migration *migrateRulesV4ToV5) Down(ctx context.Context, db *bun.DB) error {
|
||||
return nil
|
||||
}
|
||||
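Note: the version gate makes this migration idempotent: rules already at v5 are skipped outright, rules that already carry a non-empty condition.compositeQuery.queries envelope only get their version bumped, and everything else goes through the full migrator. The envelope probe, pulled out as a standalone sketch:

    // hasQueriesEnvelope reports whether a rule payload already carries a
    // non-empty condition.compositeQuery.queries array (a v5-shaped rule).
    func hasQueriesEnvelope(data map[string]any) bool {
        condition, ok := data["condition"].(map[string]any)
        if !ok {
            return false
        }
        compositeQuery, ok := condition["compositeQuery"].(map[string]any)
        if !ok {
            return false
        }
        queries, ok := compositeQuery["queries"].([]any)
        return ok && len(queries) > 0
    }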
@@ -1,6 +1,7 @@
package authtypes

import (
    "encoding"
    "encoding/json"
    "regexp"

@@ -10,8 +11,10 @@ import (
var (
    nameRegex = regexp.MustCompile("^[a-z-]{1,50}$")

    _ json.Marshaler   = new(Name)
    _ json.Unmarshaler = new(Name)
    _ json.Marshaler           = new(Name)
    _ json.Unmarshaler         = new(Name)
    _ encoding.TextMarshaler   = new(Name)
    _ encoding.TextUnmarshaler = new(Name)
)

type Name struct {
@@ -58,3 +61,16 @@ func (name *Name) UnmarshalJSON(data []byte) error {
    *name = shadow
    return nil
}

func (name Name) MarshalText() ([]byte, error) {
    return []byte(name.val), nil
}

func (name *Name) UnmarshalText(text []byte) error {
    shadow, err := NewName(string(text))
    if err != nil {
        return err
    }
    *name = shadow
    return nil
}

pkg/types/authtypes/object.go (new file, 177 lines)
@@ -0,0 +1,177 @@
package authtypes

import (
    "encoding/json"
    "slices"
    "strings"

    "github.com/SigNoz/signoz/pkg/errors"
)

type Resource struct {
    Name Name `json:"name" required:"true"`
    Type Type `json:"type" required:"true"`
}

type GettableResources struct {
    Resources []*Resource         `json:"resources" required:"true" nullable:"false"`
    Relations map[Relation][]Type `json:"relations" required:"true"`
}

type Object struct {
    Resource Resource `json:"resource" required:"true"`
    Selector Selector `json:"selector" required:"true"`
}

type GettableObjects struct {
    Resource  Resource   `json:"resource" required:"true"`
    Selectors []Selector `json:"selectors" required:"true" nullable:"false"`
}

type PatchableObjects struct {
    Additions []*GettableObjects `json:"additions" required:"true" nullable:"true"`
    Deletions []*GettableObjects `json:"deletions" required:"true" nullable:"true"`
}

func NewObject(resource Resource, selector Selector) (*Object, error) {
    err := IsValidSelector(resource.Type, selector.String())
    if err != nil {
        return nil, err
    }

    return &Object{Resource: resource, Selector: selector}, nil
}

func NewObjectsFromGettableObjects(patchableObjects []*GettableObjects) ([]*Object, error) {
    objects := make([]*Object, 0)

    for _, patchObject := range patchableObjects {
        for _, selector := range patchObject.Selectors {
            object, err := NewObject(patchObject.Resource, selector)
            if err != nil {
                return nil, err
            }

            objects = append(objects, object)
        }
    }

    return objects, nil
}

func NewPatchableObjects(additions []*GettableObjects, deletions []*GettableObjects, relation Relation) ([]*Object, []*Object, error) {
    if len(additions) == 0 && len(deletions) == 0 {
        return nil, nil, errors.New(errors.TypeInvalidInput, ErrCodeInvalidPatchObject, "empty object patch request received, at least one of additions or deletions must be present")
    }

    for _, object := range additions {
        if !slices.Contains(TypeableRelations[object.Resource.Type], relation) {
            return nil, nil, errors.Newf(errors.TypeInvalidInput, ErrCodeAuthZInvalidRelation, "relation %s is invalid for type %s", relation.StringValue(), object.Resource.Type.StringValue())
        }
    }

    for _, object := range deletions {
        if !slices.Contains(TypeableRelations[object.Resource.Type], relation) {
            return nil, nil, errors.Newf(errors.TypeInvalidInput, ErrCodeAuthZInvalidRelation, "relation %s is invalid for type %s", relation.StringValue(), object.Resource.Type.StringValue())
        }
    }

    additionObjects, err := NewObjectsFromGettableObjects(additions)
    if err != nil {
        return nil, nil, err
    }

    deletionsObjects, err := NewObjectsFromGettableObjects(deletions)
    if err != nil {
        return nil, nil, err
    }

    return additionObjects, deletionsObjects, nil
}

func NewGettableResources(resources []*Resource) *GettableResources {
    return &GettableResources{
        Resources: resources,
        Relations: RelationsTypeable,
    }
}

func NewGettableObjects(objects []*Object) []*GettableObjects {
    grouped := make(map[Resource][]Selector)
    for _, obj := range objects {
        key := obj.Resource
        if _, ok := grouped[key]; !ok {
            grouped[key] = make([]Selector, 0)
        }

        grouped[key] = append(grouped[key], obj.Selector)
    }

    gettableObjects := make([]*GettableObjects, 0, len(grouped))
    for resource, selectors := range grouped {
        gettableObjects = append(gettableObjects, &GettableObjects{
            Resource:  resource,
            Selectors: selectors,
        })
    }

    return gettableObjects
}

func MustNewObject(resource Resource, selector Selector) *Object {
    object, err := NewObject(resource, selector)
    if err != nil {
        panic(err)
    }

    return object
}

func MustNewObjectFromString(input string) *Object {
    parts := strings.Split(input, "/")
    if len(parts) != 4 {
        panic(errors.Newf(errors.TypeInternal, errors.CodeInternal, "invalid input format: %s", input))
    }

    typeParts := strings.Split(parts[0], ":")
    if len(typeParts) != 2 {
        panic(errors.Newf(errors.TypeInternal, errors.CodeInternal, "invalid type format: %s", parts[0]))
    }

    resource := Resource{
        Type: MustNewType(typeParts[0]),
        Name: MustNewName(parts[2]),
    }

    selector := MustNewSelector(resource.Type, parts[3])

    return &Object{Resource: resource, Selector: selector}
}

func MustNewObjectsFromStringSlice(input []string) []*Object {
    objects := make([]*Object, 0, len(input))
    for _, str := range input {
        objects = append(objects, MustNewObjectFromString(str))
    }
    return objects
}

func (object *Object) UnmarshalJSON(data []byte) error {
    var shadow = struct {
        Resource Resource
        Selector Selector
    }{}

    err := json.Unmarshal(data, &shadow)
    if err != nil {
        return err
    }

    obj, err := NewObject(shadow.Resource, shadow.Selector)
    if err != nil {
        return err
    }

    *object = *obj
    return nil
}
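Note: the wire/domain split is deliberate here: the request type PatchableObjects groups selectors per resource (GettableObjects), while the authz layer consumes flat *Object values; NewPatchableObjects first rejects relations that are illegal for any referenced resource type, then expands every (resource, selectors) group. A usage sketch with hypothetical type and selector values, since the valid Type/Selector combinations are defined elsewhere in this package:

    // Hypothetical values; RelationRead and the Must* constructors exist in this package.
    roleType := MustNewType("role")
    req := &PatchableObjects{
        Additions: []*GettableObjects{{
            Resource:  Resource{Type: roleType, Name: MustNewName("dashboard-readers")},
            Selectors: []Selector{MustNewSelector(roleType, "0199b9a7-0000-0000-0000-000000000000")},
        }},
    }
    additions, deletions, err := NewPatchableObjects(req.Additions, req.Deletions, RelationRead)
    // err is non-nil when both lists are empty or the relation is invalid for a resource type.
    _, _, _ = additions, deletions, err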
@@ -7,6 +7,7 @@ import (

var (
    ErrCodeAuthZInvalidRelation = errors.MustNewCode("authz_invalid_relation")
    ErrCodeInvalidPatchObject   = errors.MustNewCode("authz_invalid_patch_objects")
)

var (
@@ -26,6 +27,14 @@ var TypeableRelations = map[Type][]Relation{
    TypeMetaResources: {RelationCreate, RelationList},
}

var RelationsTypeable = map[Relation][]Type{
    RelationCreate: {TypeMetaResources},
    RelationRead:   {TypeUser, TypeRole, TypeOrganization, TypeMetaResource},
    RelationList:   {TypeMetaResources},
    RelationUpdate: {TypeUser, TypeRole, TypeOrganization, TypeMetaResource},
    RelationDelete: {TypeUser, TypeRole, TypeOrganization, TypeMetaResource},
}

type Relation struct{ valuer.String }

func NewRelation(relation string) (Relation, error) {

@@ -1,6 +1,7 @@
package authtypes

import (
    "encoding"
    "encoding/json"
    "net/http"
    "regexp"
@@ -15,8 +16,10 @@ var (
)

var (
    _ json.Marshaler   = new(Selector)
    _ json.Unmarshaler = new(Selector)
    _ json.Marshaler           = new(Selector)
    _ json.Unmarshaler         = new(Selector)
    _ encoding.TextMarshaler   = new(Selector)
    _ encoding.TextUnmarshaler = new(Selector)
)

var (
@@ -79,6 +82,15 @@ func (typed *Selector) UnmarshalJSON(data []byte) error {
    return nil
}

func (selector Selector) MarshalText() ([]byte, error) {
    return []byte(selector.val), nil
}

func (selector *Selector) UnmarshalText(text []byte) error {
    *selector = Selector{val: string(text)}
    return nil
}

func IsValidSelector(typed Type, selector string) error {
    switch typed {
    case TypeUser:

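Note: the new TextMarshaler/TextUnmarshaler implementations (mirrored on Name above) matter beyond plain JSON bodies: encoding/json can only encode non-string map keys through encoding.TextMarshaler, and text-based codecs use the same interfaces; the compile-time var checks pin that contract. A generic illustration of the map-key case:

    type key struct{ val string }

    func (k key) MarshalText() ([]byte, error) { return []byte(k.val), nil }

    func demo() ([]byte, error) {
        m := map[key][]string{{val: "read"}: {"user", "role"}}
        // Works only because key implements encoding.TextMarshaler:
        // produces {"read":["user","role"]}
        return json.Marshal(m)
    }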
@@ -3,24 +3,13 @@ package authtypes

import (
	"encoding/json"
	"slices"
	"strings"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type Resource struct {
	Name Name `json:"name" required:"true"`
	Type Type `json:"type" required:"true"`
}

type Object struct {
	Resource Resource `json:"resource" required:"true"`
	Selector Selector `json:"selector" required:"true"`
}

type Transaction struct {
-	ID       valuer.UUID `json:"id"`
+	ID       valuer.UUID `json:"-"`
	Relation Relation    `json:"relation" required:"true"`
	Object   Object      `json:"object" required:"true"`
}
@@ -31,53 +20,6 @@ type GettableTransaction struct {
	Authorized bool `json:"authorized" required:"true"`
}

func NewObject(resource Resource, selector Selector) (*Object, error) {
	err := IsValidSelector(resource.Type, selector.val)
	if err != nil {
		return nil, err
	}

	return &Object{Resource: resource, Selector: selector}, nil
}

func MustNewObject(resource Resource, selector Selector) *Object {
	object, err := NewObject(resource, selector)
	if err != nil {
		panic(err)
	}

	return object
}

func MustNewObjectFromString(input string) *Object {
	parts := strings.Split(input, "/")
	if len(parts) != 4 {
		panic(errors.Newf(errors.TypeInternal, errors.CodeInternal, "invalid input format: %s", input))
	}

	typeParts := strings.Split(parts[0], ":")
	if len(typeParts) != 2 {
		panic(errors.Newf(errors.TypeInternal, errors.CodeInternal, "invalid type format: %s", parts[0]))
	}

	resource := Resource{
		Type: MustNewType(typeParts[0]),
		Name: MustNewName(parts[2]),
	}

	selector := MustNewSelector(resource.Type, parts[3])

	return &Object{Resource: resource, Selector: selector}
}

func MustNewObjectsFromStringSlice(input []string) []*Object {
	objects := make([]*Object, 0, len(input))
	for _, str := range input {
		objects = append(objects, MustNewObjectFromString(str))
	}
	return objects
}

func NewTransaction(relation Relation, object Object) (*Transaction, error) {
	if !slices.Contains(TypeableRelations[object.Resource.Type], relation) {
		return nil, errors.Newf(errors.TypeInvalidInput, ErrCodeAuthZInvalidRelation, "invalid relation %s for type %s", relation.StringValue(), object.Resource.Type.StringValue())
@@ -100,26 +42,6 @@ func NewGettableTransaction(transactions []*Transaction, results map[string]*Tup
	return gettableTransactions
}

func (object *Object) UnmarshalJSON(data []byte) error {
	var shadow = struct {
		Resource Resource
		Selector Selector
	}{}

	err := json.Unmarshal(data, &shadow)
	if err != nil {
		return err
	}

	obj, err := NewObject(shadow.Resource, shadow.Selector)
	if err != nil {
		return err
	}

	*object = *obj
	return nil
}

func (transaction *Transaction) UnmarshalJSON(data []byte) error {
	var shadow = struct {
		Relation Relation
@@ -33,8 +33,8 @@ type LimitConfig struct {
}

type LimitValue struct {
-	Size  int64 `json:"size"`
-	Count int64 `json:"count"`
+	Size  *int64 `json:"size,omitempty"`
+	Count *int64 `json:"count,omitempty"`
}

type LimitMetric struct {
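The switch from `int64` to `*int64` is the standard Go pattern for distinguishing "not set" from an explicit zero: with a plain `int64` plus `omitempty`, a deliberate `0` is silently dropped from the JSON, and an absent field is indistinguishable from `0` after decoding. A small self-contained sketch of the behavior; the struct is copied from the diff, the values are made up:

```go
package main

import (
	"encoding/json"
	"fmt"
)

type LimitValue struct {
	Size  *int64 `json:"size,omitempty"`
	Count *int64 `json:"count,omitempty"`
}

func main() {
	size := int64(1000)
	b, _ := json.Marshal(LimitValue{Size: &size})
	fmt.Println(string(b)) // {"size":1000} (count omitted, not sent as 0)

	zero := int64(0)
	b, _ = json.Marshal(LimitValue{Count: &zero})
	fmt.Println(string(b)) // {"count":0} (an explicit zero survives)
}
```

This is exactly what the ingestion-key limit tests further down assert: a payload built from only `size` must not contain a `count` field at all.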
@@ -5,5 +5,5 @@ import (
)

type Identifiable struct {
-	ID valuer.UUID `json:"id" bun:"id,pk,type:text"`
+	ID valuer.UUID `json:"id" bun:"id,pk,type:text" required:"true"`
}
@@ -3,7 +3,6 @@ package roletypes

import (
	"encoding/json"
	"regexp"
	"slices"
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
@@ -84,16 +83,6 @@ type PatchableRole struct {
	Description string `json:"description" required:"true"`
}

type PatchableObjects struct {
	Additions []*authtypes.Object `json:"additions" required:"true"`
	Deletions []*authtypes.Object `json:"deletions" required:"true"`
}

type GettableResources struct {
	Resources []*authtypes.Resource                   `json:"resources" required:"true"`
	Relations map[authtypes.Type][]authtypes.Relation `json:"relations" required:"true"`
}

func NewStorableRoleFromRole(role *Role) *StorableRole {
	return &StorableRole{
		Identifiable: role.Identifiable,
@@ -142,15 +131,8 @@ func NewManagedRoles(orgID valuer.UUID) []*Role {

}

func NewGettableResources(resources []*authtypes.Resource) *GettableResources {
	return &GettableResources{
		Resources: resources,
		Relations: authtypes.TypeableRelations,
	}
}

func (role *Role) PatchMetadata(description string) error {
-	err := role.CanEditDelete()
+	err := role.ErrIfManaged()
	if err != nil {
		return err
	}
@@ -160,32 +142,7 @@ func (role *Role) PatchMetadata(description string) error {
	return nil
}

func (role *Role) NewPatchableObjects(additions []*authtypes.Object, deletions []*authtypes.Object, relation authtypes.Relation) (*PatchableObjects, error) {
	err := role.CanEditDelete()
	if err != nil {
		return nil, err
	}

	if len(additions) == 0 && len(deletions) == 0 {
		return nil, errors.New(errors.TypeInvalidInput, ErrCodeRoleEmptyPatch, "empty object patch request received, at least one of additions or deletions must be present")
	}

	for _, object := range additions {
		if !slices.Contains(authtypes.TypeableRelations[object.Resource.Type], relation) {
			return nil, errors.Newf(errors.TypeInvalidInput, authtypes.ErrCodeAuthZInvalidRelation, "relation %s is invalid for type %s", relation.StringValue(), object.Resource.Type.StringValue())
		}
	}

	for _, object := range deletions {
		if !slices.Contains(authtypes.TypeableRelations[object.Resource.Type], relation) {
			return nil, errors.Newf(errors.TypeInvalidInput, authtypes.ErrCodeAuthZInvalidRelation, "relation %s is invalid for type %s", relation.StringValue(), object.Resource.Type.StringValue())
		}
	}

	return &PatchableObjects{Additions: additions, Deletions: deletions}, nil
}

-func (role *Role) CanEditDelete() error {
+func (role *Role) ErrIfManaged() error {
	if role.Type == RoleTypeManaged {
		return errors.Newf(errors.TypeInvalidInput, ErrCodeRoleInvalidInput, "cannot edit/delete managed role: %s", role.Name)
	}
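After the rename, callers guard mutations with `ErrIfManaged` (formerly `CanEditDelete`), exactly as `PatchMetadata` above does. A minimal sketch of the pattern; the helper name and the `roletypes` import path are assumptions by analogy with `authtypes`, and the actual mutation is elided since `Role`'s fields are not shown in this diff:

```go
package main

import "github.com/SigNoz/signoz/pkg/types/roletypes"

// mutateRole is a hypothetical helper: the guard is the point. Managed
// roles reject edits and deletes outright, so check before mutating.
func mutateRole(role *roletypes.Role, mutate func(*roletypes.Role)) error {
	if err := role.ErrIfManaged(); err != nil {
		return err // "cannot edit/delete managed role: <name>"
	}
	mutate(role)
	return nil
}
```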
@@ -355,6 +355,10 @@ func (r *PostableRule) validate() error {
		errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "composite query is required"))
	}

	if r.Version != "v5" {
		errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "only version v5 is supported, got %q", r.Version))
	}

	if isAllQueriesDisabled(r.RuleCondition.CompositeQuery) {
		errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "all queries are disabled in rule condition"))
	}

@@ -108,6 +108,7 @@ func TestParseIntoRule(t *testing.T) {
				"ruleType": "threshold_rule",
				"evalWindow": "5m",
				"frequency": "1m",
				"version": "v5",
				"condition": {
					"compositeQuery": {
						"queryType": "builder",
@@ -150,6 +151,7 @@ func TestParseIntoRule(t *testing.T) {
			content: []byte(`{
				"alert": "DefaultsRule",
				"ruleType": "threshold_rule",
				"version": "v5",
				"condition": {
					"compositeQuery": {
						"queryType": "builder",
@@ -187,6 +189,7 @@ func TestParseIntoRule(t *testing.T) {
			initRule: PostableRule{},
			content: []byte(`{
				"alert": "PromQLRule",
				"version": "v5",
				"condition": {
					"compositeQuery": {
						"queryType": "promql",
@@ -256,6 +259,7 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
			content: []byte(`{
				"alert": "SeverityLabelTest",
				"schemaVersion": "v1",
				"version": "v5",
				"condition": {
					"compositeQuery": {
						"queryType": "builder",
@@ -344,6 +348,7 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
			content: []byte(`{
				"alert": "NoLabelsTest",
				"schemaVersion": "v1",
				"version": "v5",
				"condition": {
					"compositeQuery": {
						"queryType": "builder",
@@ -384,6 +389,7 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
			content: []byte(`{
				"alert": "OverwriteTest",
				"schemaVersion": "v1",
				"version": "v5",
				"condition": {
					"compositeQuery": {
						"queryType": "builder",
@@ -474,6 +480,7 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
			content: []byte(`{
				"alert": "V2Test",
				"schemaVersion": "v2",
				"version": "v5",
				"condition": {
					"compositeQuery": {
						"queryType": "builder",
@@ -517,6 +524,7 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
			initRule: PostableRule{},
			content: []byte(`{
				"alert": "DefaultSchemaTest",
				"version": "v5",
				"condition": {
					"compositeQuery": {
						"queryType": "builder",
@@ -569,6 +577,7 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
func TestParseIntoRuleThresholdGeneration(t *testing.T) {
	content := []byte(`{
		"alert": "TestThresholds",
		"version": "v5",
		"condition": {
			"compositeQuery": {
				"queryType": "builder",
@@ -639,6 +648,7 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {
		"schemaVersion": "v2",
		"alert": "MultiThresholdAlert",
		"ruleType": "threshold_rule",
		"version": "v5",
		"condition": {
			"compositeQuery": {
				"queryType": "builder",
@@ -732,6 +742,7 @@ func TestAnomalyNegationEval(t *testing.T) {
			ruleJSON: []byte(`{
				"alert": "AnomalyBelowTest",
				"ruleType": "anomaly_rule",
				"version": "v5",
				"condition": {
					"compositeQuery": {
						"queryType": "builder",
@@ -766,6 +777,7 @@ func TestAnomalyNegationEval(t *testing.T) {
			ruleJSON: []byte(`{
				"alert": "AnomalyBelowTest",
				"ruleType": "anomaly_rule",
				"version": "v5",
				"condition": {
					"compositeQuery": {
						"queryType": "builder",
@@ -799,6 +811,7 @@ func TestAnomalyNegationEval(t *testing.T) {
			ruleJSON: []byte(`{
				"alert": "AnomalyAboveTest",
				"ruleType": "anomaly_rule",
				"version": "v5",
				"condition": {
					"compositeQuery": {
						"queryType": "builder",
@@ -833,6 +846,7 @@ func TestAnomalyNegationEval(t *testing.T) {
			ruleJSON: []byte(`{
				"alert": "AnomalyAboveTest",
				"ruleType": "anomaly_rule",
				"version": "v5",
				"condition": {
					"compositeQuery": {
						"queryType": "builder",
@@ -866,6 +880,7 @@ func TestAnomalyNegationEval(t *testing.T) {
			ruleJSON: []byte(`{
				"alert": "AnomalyBelowAllTest",
				"ruleType": "anomaly_rule",
				"version": "v5",
				"condition": {
					"compositeQuery": {
						"queryType": "builder",
@@ -901,6 +916,7 @@ func TestAnomalyNegationEval(t *testing.T) {
			ruleJSON: []byte(`{
				"alert": "AnomalyBelowAllTest",
				"ruleType": "anomaly_rule",
				"version": "v5",
				"condition": {
					"compositeQuery": {
						"queryType": "builder",
@@ -935,6 +951,7 @@ func TestAnomalyNegationEval(t *testing.T) {
			ruleJSON: []byte(`{
				"alert": "AnomalyOutOfBoundsTest",
				"ruleType": "anomaly_rule",
				"version": "v5",
				"condition": {
					"compositeQuery": {
						"queryType": "builder",
@@ -969,6 +986,7 @@ func TestAnomalyNegationEval(t *testing.T) {
			ruleJSON: []byte(`{
				"alert": "ThresholdTest",
				"ruleType": "threshold_rule",
				"version": "v5",
				"condition": {
					"compositeQuery": {
						"queryType": "builder",
@@ -1003,6 +1021,7 @@ func TestAnomalyNegationEval(t *testing.T) {
			ruleJSON: []byte(`{
				"alert": "ThresholdTest",
				"ruleType": "threshold_rule",
				"version": "v5",
				"condition": {
					"compositeQuery": {
						"queryType": "builder",
48  tests/integration/fixtures/gatewayutils.py  Normal file
@@ -0,0 +1,48 @@
import json
from typing import Optional

import requests
from wiremock.client import WireMockMatchers

from fixtures import types

TEST_KEY_ID = "test-key-id-001"
TEST_LIMIT_ID = "test-limit-id-001"


def common_gateway_headers():
    """Common headers expected on requests forwarded to the gateway."""
    return {
        "X-Signoz-Cloud-Api-Key": {WireMockMatchers.EQUAL_TO: "secret-key"},
        "X-Consumer-Username": {
            WireMockMatchers.EQUAL_TO: "lid:00000000-0000-0000-0000-000000000000"
        },
        "X-Consumer-Groups": {WireMockMatchers.EQUAL_TO: "ns:default"},
    }


def get_gateway_requests(signoz: types.SigNoz, method: str, url: str) -> list:
    """Return captured requests from the WireMock gateway journal.

    Returns an empty list when no requests match or the admin API is unreachable.
    """
    response = requests.post(
        signoz.gateway.host_configs["8080"].get("/__admin/requests/find"),
        json={"method": method, "url": url},
        timeout=5,
    )
    return response.json().get("requests", [])


def get_latest_gateway_request_body(
    signoz: types.SigNoz, method: str, url: str
) -> Optional[dict]:
    """Return the parsed JSON body of the most recent matching gateway request.

    WireMock returns requests in reverse chronological order, so ``matched[0]``
    is always the latest. Returns ``None`` when no matching request is found.
    """
    matched = get_gateway_requests(signoz, method, url)
    if not matched:
        return None
    return json.loads(matched[0]["body"])
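A note on the exact query strings that the tests below pin in their WireMock mappings: the Go side builds them with `url.Values`, whose `Encode` method sorts keys alphabetically, so a mock can safely match a literal string like `name=my-test&page=1&per_page=10`. A standalone demonstration using only the Go standard library:

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	q := url.Values{}
	q.Set("per_page", "10")
	q.Set("page", "1")
	q.Set("name", "my-test")
	// Encode sorts keys alphabetically, making the order deterministic
	// regardless of insertion order.
	fmt.Println(q.Encode()) // name=my-test&page=1&per_page=10
}
```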
424  tests/integration/src/ingestionkeys/01_ingestion_keys.py  Normal file
@@ -0,0 +1,424 @@
from http import HTTPStatus
from typing import Callable, List

import requests
from wiremock.client import (
    HttpMethods,
    Mapping,
    MappingRequest,
    MappingResponse,
)

from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD, add_license
from fixtures.gatewayutils import (
    TEST_KEY_ID,
    common_gateway_headers,
    get_gateway_requests,
    get_latest_gateway_request_body,
)
from fixtures.logger import setup_logger

logger = setup_logger(__name__)


def test_apply_license(
    signoz: types.SigNoz,
    create_user_admin: types.Operation,  # pylint: disable=unused-argument
    make_http_mocks: Callable[[types.TestContainerDocker, List[Mapping]], None],
    get_token: Callable[[str, str], str],
) -> None:
    """Activate a license so that all subsequent gateway calls succeed."""
    add_license(signoz, make_http_mocks, get_token)


# ---------------------------------------------------------------------------
# Ingestion key CRUD
# ---------------------------------------------------------------------------


def test_create_ingestion_key(
    signoz: types.SigNoz,
    create_user_admin: types.Operation,  # pylint: disable=unused-argument
    make_http_mocks: Callable[[types.TestContainerDocker, list], None],
    get_token: Callable[[str, str], str],
) -> None:
    """POST /api/v2/gateway/ingestion_keys creates a key via the gateway."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    make_http_mocks(
        signoz.gateway,
        [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.POST,
                    url="/v1/workspaces/me/keys",
                    headers=common_gateway_headers(),
                ),
                response=MappingResponse(
                    status=201,
                    json_body={
                        "status": "success",
                        "data": {
                            "id": TEST_KEY_ID,
                            "value": "ingestion-key-secret-value",
                        },
                    },
                ),
                persistent=False,
            ),
        ],
    )

    response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v2/gateway/ingestion_keys"),
        json={
            "name": "my-test-key",
            "tags": ["env:test", "team:platform"],
            "expires_at": "2030-01-01T00:00:00Z",
        },
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=10,
    )

    assert (
        response.status_code == HTTPStatus.CREATED
    ), f"Expected 201, got {response.status_code}: {response.text}"

    data = response.json()["data"]
    assert data["id"] == TEST_KEY_ID
    assert data["value"] == "ingestion-key-secret-value"

    # Verify the body forwarded to the gateway
    body = get_latest_gateway_request_body(signoz, "POST", "/v1/workspaces/me/keys")
    assert body is not None, "Expected a POST request to reach the gateway"
    assert body["name"] == "my-test-key"
    assert body["tags"] == ["env:test", "team:platform"]


def test_get_ingestion_keys(
    signoz: types.SigNoz,
    create_user_admin: types.Operation,  # pylint: disable=unused-argument
    make_http_mocks: Callable[[types.TestContainerDocker, list], None],
    get_token: Callable[[str, str], str],
) -> None:
    """GET /api/v2/gateway/ingestion_keys lists keys via the gateway."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Default page=1, per_page=10 → gateway gets ?page=1&per_page=10
    make_http_mocks(
        signoz.gateway,
        [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.GET,
                    url="/v1/workspaces/me/keys?page=1&per_page=10",
                    headers=common_gateway_headers(),
                ),
                response=MappingResponse(
                    status=200,
                    json_body={
                        "data": [
                            {
                                "id": TEST_KEY_ID,
                                "name": "my-test-key",
                                "value": "secret",
                                "expires_at": "2030-01-01T00:00:00Z",
                                "tags": ["env:test"],
                                "created_at": "2024-01-01T00:00:00Z",
                                "updated_at": "2024-01-01T00:00:00Z",
                                "workspace_id": "ws-1",
                                "limits": [],
                            }
                        ],
                        "_pagination": {
                            "page": 1,
                            "per_page": 10,
                            "pages": 1,
                            "total": 1,
                        },
                    },
                ),
                persistent=False,
            ),
        ],
    )

    response = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/gateway/ingestion_keys"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=10,
    )

    assert (
        response.status_code == HTTPStatus.OK
    ), f"Expected 200, got {response.status_code}: {response.text}"

    data = response.json()["data"]
    assert len(data["keys"]) == 1
    assert data["keys"][0]["id"] == TEST_KEY_ID
    assert data["keys"][0]["name"] == "my-test-key"
    assert data["_pagination"]["total"] == 1


def test_get_ingestion_keys_custom_pagination(
    signoz: types.SigNoz,
    create_user_admin: types.Operation,  # pylint: disable=unused-argument
    make_http_mocks: Callable[[types.TestContainerDocker, list], None],
    get_token: Callable[[str, str], str],
) -> None:
    """GET /api/v2/gateway/ingestion_keys with custom pagination params."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    make_http_mocks(
        signoz.gateway,
        [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.GET,
                    url="/v1/workspaces/me/keys?page=2&per_page=5",
                    headers=common_gateway_headers(),
                ),
                response=MappingResponse(
                    status=200,
                    json_body={
                        "data": [],
                        "_pagination": {
                            "page": 2,
                            "per_page": 5,
                            "pages": 1,
                            "total": 3,
                        },
                    },
                ),
                persistent=False,
            ),
        ],
    )

    response = requests.get(
        signoz.self.host_configs["8080"].get(
            "/api/v2/gateway/ingestion_keys?page=2&per_page=5"
        ),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=10,
    )

    assert (
        response.status_code == HTTPStatus.OK
    ), f"Expected 200, got {response.status_code}: {response.text}"

    data = response.json()["data"]
    assert len(data["keys"]) == 0
    assert data["_pagination"]["page"] == 2
    assert data["_pagination"]["per_page"] == 5


def test_search_ingestion_keys(
    signoz: types.SigNoz,
    create_user_admin: types.Operation,  # pylint: disable=unused-argument
    make_http_mocks: Callable[[types.TestContainerDocker, list], None],
    get_token: Callable[[str, str], str],
) -> None:
    """GET /api/v2/gateway/ingestion_keys/search searches keys by name."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # name, page, per_page are sorted alphabetically by Go url.Values.Encode()
    make_http_mocks(
        signoz.gateway,
        [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.GET,
                    url="/v1/workspaces/me/keys/search?name=my-test&page=1&per_page=10",
                    headers=common_gateway_headers(),
                ),
                response=MappingResponse(
                    status=200,
                    json_body={
                        "data": [
                            {
                                "id": TEST_KEY_ID,
                                "name": "my-test-key",
                                "value": "secret",
                                "expires_at": "2030-01-01T00:00:00Z",
                                "tags": ["env:test"],
                                "created_at": "2024-01-01T00:00:00Z",
                                "updated_at": "2024-01-01T00:00:00Z",
                                "workspace_id": "ws-1",
                                "limits": [],
                            }
                        ],
                        "_pagination": {
                            "page": 1,
                            "per_page": 10,
                            "pages": 1,
                            "total": 1,
                        },
                    },
                ),
                persistent=False,
            ),
        ],
    )

    response = requests.get(
        signoz.self.host_configs["8080"].get(
            "/api/v2/gateway/ingestion_keys/search?name=my-test"
        ),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=10,
    )

    assert (
        response.status_code == HTTPStatus.OK
    ), f"Expected 200, got {response.status_code}: {response.text}"

    data = response.json()["data"]
    assert len(data["keys"]) == 1
    assert data["keys"][0]["name"] == "my-test-key"


def test_search_ingestion_keys_empty(
    signoz: types.SigNoz,
    create_user_admin: types.Operation,  # pylint: disable=unused-argument
    make_http_mocks: Callable[[types.TestContainerDocker, list], None],
    get_token: Callable[[str, str], str],
) -> None:
    """Search returns an empty list when no keys match."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    make_http_mocks(
        signoz.gateway,
        [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.GET,
                    url="/v1/workspaces/me/keys/search?name=nonexistent&page=1&per_page=10",
                    headers=common_gateway_headers(),
                ),
                response=MappingResponse(
                    status=200,
                    json_body={
                        "data": [],
                        "_pagination": {
                            "page": 1,
                            "per_page": 10,
                            "pages": 0,
                            "total": 0,
                        },
                    },
                ),
                persistent=False,
            ),
        ],
    )

    response = requests.get(
        signoz.self.host_configs["8080"].get(
            "/api/v2/gateway/ingestion_keys/search?name=nonexistent"
        ),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=10,
    )

    assert (
        response.status_code == HTTPStatus.OK
    ), f"Expected 200, got {response.status_code}: {response.text}"

    data = response.json()["data"]
    assert len(data["keys"]) == 0
    assert data["_pagination"]["total"] == 0


def test_update_ingestion_key(
    signoz: types.SigNoz,
    create_user_admin: types.Operation,  # pylint: disable=unused-argument
    make_http_mocks: Callable[[types.TestContainerDocker, list], None],
    get_token: Callable[[str, str], str],
) -> None:
    """PATCH /api/v2/gateway/ingestion_keys/{keyId} updates a key via the gateway."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    gateway_url = f"/v1/workspaces/me/keys/{TEST_KEY_ID}"

    make_http_mocks(
        signoz.gateway,
        [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.PATCH,
                    url=gateway_url,
                    headers=common_gateway_headers(),
                ),
                response=MappingResponse(status=204),
                persistent=False,
            ),
        ],
    )

    response = requests.patch(
        signoz.self.host_configs["8080"].get(
            f"/api/v2/gateway/ingestion_keys/{TEST_KEY_ID}"
        ),
        json={
            "name": "renamed-key",
            "tags": ["env:prod"],
            "expires_at": "2031-06-15T00:00:00Z",
        },
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=10,
    )

    assert (
        response.status_code == HTTPStatus.NO_CONTENT
    ), f"Expected 204, got {response.status_code}: {response.text}"

    # Verify the body forwarded to the gateway
    body = get_latest_gateway_request_body(signoz, "PATCH", gateway_url)
    assert body is not None, "Expected a PATCH request to reach the gateway"
    assert body["name"] == "renamed-key"
    assert body["tags"] == ["env:prod"]


def test_delete_ingestion_key(
    signoz: types.SigNoz,
    create_user_admin: types.Operation,  # pylint: disable=unused-argument
    make_http_mocks: Callable[[types.TestContainerDocker, list], None],
    get_token: Callable[[str, str], str],
) -> None:
    """DELETE /api/v2/gateway/ingestion_keys/{keyId} deletes a key via the gateway."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    gateway_url = f"/v1/workspaces/me/keys/{TEST_KEY_ID}"

    make_http_mocks(
        signoz.gateway,
        [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.DELETE,
                    url=gateway_url,
                    headers=common_gateway_headers(),
                ),
                response=MappingResponse(status=204),
                persistent=False,
            ),
        ],
    )

    response = requests.delete(
        signoz.self.host_configs["8080"].get(
            f"/api/v2/gateway/ingestion_keys/{TEST_KEY_ID}"
        ),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=10,
    )

    assert (
        response.status_code == HTTPStatus.NO_CONTENT
    ), f"Expected 204, got {response.status_code}: {response.text}"

    # Verify at least one DELETE reached the gateway
    matched = get_gateway_requests(signoz, "DELETE", gateway_url)
    assert len(matched) >= 1, "Expected a DELETE request to reach the gateway"
418  tests/integration/src/ingestionkeys/02_ingestion_keys_limits.py  Normal file
@@ -0,0 +1,418 @@
from http import HTTPStatus
from typing import Callable, List

import requests
from wiremock.client import (
    HttpMethods,
    Mapping,
    MappingRequest,
    MappingResponse,
)

from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD, add_license
from fixtures.gatewayutils import (
    TEST_KEY_ID,
    TEST_LIMIT_ID,
    common_gateway_headers,
    get_gateway_requests,
    get_latest_gateway_request_body,
)
from fixtures.logger import setup_logger

logger = setup_logger(__name__)


def test_apply_license(
    signoz: types.SigNoz,
    create_user_admin: types.Operation,  # pylint: disable=unused-argument
    make_http_mocks: Callable[[types.TestContainerDocker, List[Mapping]], None],
    get_token: Callable[[str, str], str],
) -> None:
    """Activate a license so that all subsequent gateway calls succeed."""
    add_license(signoz, make_http_mocks, get_token)


# ---------------------------------------------------------------------------
# Create ingestion key limit
# ---------------------------------------------------------------------------


def test_create_ingestion_key_limit_only_size(
    signoz: types.SigNoz,
    create_user_admin: types.Operation,  # pylint: disable=unused-argument
    make_http_mocks: Callable[[types.TestContainerDocker, list], None],
    get_token: Callable[[str, str], str],
) -> None:
    """Creating a limit with only size omits count from the gateway payload."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    gateway_url = f"/v1/workspaces/me/keys/{TEST_KEY_ID}/limits"

    make_http_mocks(
        signoz.gateway,
        [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.POST,
                    url=gateway_url,
                    headers=common_gateway_headers(),
                ),
                response=MappingResponse(
                    status=201,
                    json_body={
                        "status": "success",
                        "data": {"id": "limit-created-1"},
                    },
                ),
                persistent=False,
            ),
        ],
    )

    response = requests.post(
        signoz.self.host_configs["8080"].get(
            f"/api/v2/gateway/ingestion_keys/{TEST_KEY_ID}/limits"
        ),
        json={
            "signal": "logs",
            "config": {"day": {"size": 1000}},
            "tags": ["test"],
        },
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=10,
    )

    assert (
        response.status_code == HTTPStatus.CREATED
    ), f"Expected 201, got {response.status_code}: {response.text}"

    assert response.json()["data"]["id"] == "limit-created-1"

    body = get_latest_gateway_request_body(signoz, "POST", gateway_url)
    assert body is not None, "Expected a POST request to reach the gateway"
    assert body["signal"] == "logs"
    assert body["config"]["day"]["size"] == 1000
    assert "count" not in body["config"]["day"], "count should be absent when not set"
    assert "second" not in body["config"], "second should be absent when not set"
    assert body["tags"] == ["test"]


def test_create_ingestion_key_limit_only_count(
    signoz: types.SigNoz,
    create_user_admin: types.Operation,  # pylint: disable=unused-argument
    make_http_mocks: Callable[[types.TestContainerDocker, list], None],
    get_token: Callable[[str, str], str],
) -> None:
    """Creating a limit with only count omits size from the gateway payload."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    gateway_url = f"/v1/workspaces/me/keys/{TEST_KEY_ID}/limits"

    make_http_mocks(
        signoz.gateway,
        [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.POST,
                    url=gateway_url,
                    headers=common_gateway_headers(),
                ),
                response=MappingResponse(
                    status=201,
                    json_body={
                        "status": "success",
                        "data": {"id": "limit-created-2"},
                    },
                ),
                persistent=False,
            ),
        ],
    )

    response = requests.post(
        signoz.self.host_configs["8080"].get(
            f"/api/v2/gateway/ingestion_keys/{TEST_KEY_ID}/limits"
        ),
        json={
            "signal": "traces",
            "config": {"day": {"count": 500}},
            "tags": ["test"],
        },
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=10,
    )

    assert (
        response.status_code == HTTPStatus.CREATED
    ), f"Expected 201, got {response.status_code}: {response.text}"

    body = get_latest_gateway_request_body(signoz, "POST", gateway_url)
    assert body is not None, "Expected a POST request to reach the gateway"
    assert body["signal"] == "traces"
    assert body["config"]["day"]["count"] == 500
    assert "size" not in body["config"]["day"], "size should be absent when not set"
    assert body["tags"] == ["test"]


def test_create_ingestion_key_limit_both_size_and_count(
    signoz: types.SigNoz,
    create_user_admin: types.Operation,  # pylint: disable=unused-argument
    make_http_mocks: Callable[[types.TestContainerDocker, list], None],
    get_token: Callable[[str, str], str],
) -> None:
    """Creating a limit with both size and count includes both in the gateway payload."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    gateway_url = f"/v1/workspaces/me/keys/{TEST_KEY_ID}/limits"

    make_http_mocks(
        signoz.gateway,
        [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.POST,
                    url=gateway_url,
                    headers=common_gateway_headers(),
                ),
                response=MappingResponse(
                    status=201,
                    json_body={
                        "status": "success",
                        "data": {"id": "limit-created-3"},
                    },
                ),
                persistent=False,
            ),
        ],
    )

    response = requests.post(
        signoz.self.host_configs["8080"].get(
            f"/api/v2/gateway/ingestion_keys/{TEST_KEY_ID}/limits"
        ),
        json={
            "signal": "metrics",
            "config": {
                "day": {"size": 2000, "count": 750},
                "second": {"size": 100, "count": 50},
            },
            "tags": ["test"],
        },
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=10,
    )

    assert (
        response.status_code == HTTPStatus.CREATED
    ), f"Expected 201, got {response.status_code}: {response.text}"

    body = get_latest_gateway_request_body(signoz, "POST", gateway_url)
    assert body is not None, "Expected a POST request to reach the gateway"
    assert body["signal"] == "metrics"
    assert body["config"]["day"]["size"] == 2000
    assert body["config"]["day"]["count"] == 750
    assert body["config"]["second"]["size"] == 100
    assert body["config"]["second"]["count"] == 50
    assert body["tags"] == ["test"]


# ---------------------------------------------------------------------------
# Update ingestion key limit
# ---------------------------------------------------------------------------


def test_update_ingestion_key_limit_only_size(
    signoz: types.SigNoz,
    create_user_admin: types.Operation,  # pylint: disable=unused-argument
    make_http_mocks: Callable[[types.TestContainerDocker, list], None],
    get_token: Callable[[str, str], str],
) -> None:
    """Updating a limit with only size omits count from the gateway payload."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    gateway_url = f"/v1/workspaces/me/limits/{TEST_LIMIT_ID}"

    make_http_mocks(
        signoz.gateway,
        [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.PATCH,
                    url=gateway_url,
                    headers=common_gateway_headers(),
                ),
                response=MappingResponse(status=204),
                persistent=False,
            ),
        ],
    )

    response = requests.patch(
        signoz.self.host_configs["8080"].get(
            f"/api/v2/gateway/ingestion_keys/limits/{TEST_LIMIT_ID}"
        ),
        json={
            "config": {"day": {"size": 2000}},
            "tags": ["test"],
        },
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=10,
    )

    assert (
        response.status_code == HTTPStatus.NO_CONTENT
    ), f"Expected 204, got {response.status_code}: {response.text}"

    body = get_latest_gateway_request_body(signoz, "PATCH", gateway_url)
    assert body is not None, "Expected a PATCH request to reach the gateway"
    assert body["config"]["day"]["size"] == 2000
    assert "count" not in body["config"]["day"], "count should be absent when not set"
    assert "second" not in body["config"], "second should be absent when not set"
    assert body["tags"] == ["test"]


def test_update_ingestion_key_limit_only_count(
    signoz: types.SigNoz,
    create_user_admin: types.Operation,  # pylint: disable=unused-argument
    make_http_mocks: Callable[[types.TestContainerDocker, list], None],
    get_token: Callable[[str, str], str],
) -> None:
    """Updating a limit with only count omits size from the gateway payload."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    gateway_url = f"/v1/workspaces/me/limits/{TEST_LIMIT_ID}"

    make_http_mocks(
        signoz.gateway,
        [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.PATCH,
                    url=gateway_url,
                    headers=common_gateway_headers(),
                ),
                response=MappingResponse(status=204),
                persistent=False,
            ),
        ],
    )

    response = requests.patch(
        signoz.self.host_configs["8080"].get(
            f"/api/v2/gateway/ingestion_keys/limits/{TEST_LIMIT_ID}"
        ),
        json={
            "config": {"day": {"count": 750}},
            "tags": ["test"],
        },
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=10,
    )

    assert (
        response.status_code == HTTPStatus.NO_CONTENT
    ), f"Expected 204, got {response.status_code}: {response.text}"

    body = get_latest_gateway_request_body(signoz, "PATCH", gateway_url)
    assert body is not None, "Expected a PATCH request to reach the gateway"
    assert body["config"]["day"]["count"] == 750
    assert "size" not in body["config"]["day"], "size should be absent when not set"
    assert body["tags"] == ["test"]


def test_update_ingestion_key_limit_both_size_and_count(
    signoz: types.SigNoz,
    create_user_admin: types.Operation,  # pylint: disable=unused-argument
    make_http_mocks: Callable[[types.TestContainerDocker, list], None],
    get_token: Callable[[str, str], str],
) -> None:
    """Updating a limit with both size and count includes both in the gateway payload."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    gateway_url = f"/v1/workspaces/me/limits/{TEST_LIMIT_ID}"

    make_http_mocks(
        signoz.gateway,
        [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.PATCH,
                    url=gateway_url,
                    headers=common_gateway_headers(),
                ),
                response=MappingResponse(status=204),
                persistent=False,
            ),
        ],
    )

    response = requests.patch(
        signoz.self.host_configs["8080"].get(
            f"/api/v2/gateway/ingestion_keys/limits/{TEST_LIMIT_ID}"
        ),
        json={
            "config": {"day": {"size": 1000, "count": 500}},
            "tags": ["test"],
        },
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=10,
    )

    assert (
        response.status_code == HTTPStatus.NO_CONTENT
    ), f"Expected 204, got {response.status_code}: {response.text}"

    body = get_latest_gateway_request_body(signoz, "PATCH", gateway_url)
    assert body is not None, "Expected a PATCH request to reach the gateway"
    assert body["config"]["day"]["size"] == 1000
    assert body["config"]["day"]["count"] == 500
    assert body["tags"] == ["test"]


# ---------------------------------------------------------------------------
# Delete ingestion key limit
# ---------------------------------------------------------------------------


def test_delete_ingestion_key_limit(
    signoz: types.SigNoz,
    create_user_admin: types.Operation,  # pylint: disable=unused-argument
    make_http_mocks: Callable[[types.TestContainerDocker, list], None],
    get_token: Callable[[str, str], str],
) -> None:
    """DELETE /api/v2/gateway/ingestion_keys/limits/{limitId} deletes a limit."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    gateway_url = f"/v1/workspaces/me/limits/{TEST_LIMIT_ID}"

    make_http_mocks(
        signoz.gateway,
        [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.DELETE,
                    url=gateway_url,
                    headers=common_gateway_headers(),
                ),
                response=MappingResponse(status=204),
                persistent=False,
            ),
        ],
    )

    response = requests.delete(
        signoz.self.host_configs["8080"].get(
            f"/api/v2/gateway/ingestion_keys/limits/{TEST_LIMIT_ID}"
        ),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=10,
    )

    assert (
        response.status_code == HTTPStatus.NO_CONTENT
    ), f"Expected 204, got {response.status_code}: {response.text}"

    # Verify at least one DELETE reached the gateway
    matched = get_gateway_requests(signoz, "DELETE", gateway_url)
    assert len(matched) >= 1, "Expected a DELETE request to reach the gateway"
0  tests/integration/src/ingestionkeys/__init__.py  Normal file