Compare commits

..

1 Commit

Author        SHA1        Message                                                         Date
ahmadshaheer  955aea2733  fix: handle edge case for logs column options menu navigation  2024-12-04 18:35:44 +04:30
851 changed files with 6614 additions and 29371 deletions

View File

@@ -3,6 +3,7 @@ name: build-pipeline
on:
pull_request:
branches:
- develop
- main
- release/v*

View File

@@ -3,7 +3,7 @@ name: "Update PR labels and Block PR until related docs are shipped for the feat
on:
pull_request:
branches:
- main
- develop
types: [opened, edited, labeled, unlabeled]
permissions:

View File

@@ -42,7 +42,7 @@ jobs:
kubectl create ns sample-application
# apply hotrod k8s manifest file
kubectl -n sample-application apply -f https://raw.githubusercontent.com/SigNoz/signoz/main/sample-apps/hotrod/hotrod.yaml
kubectl -n sample-application apply -f https://raw.githubusercontent.com/SigNoz/signoz/develop/sample-apps/hotrod/hotrod.yaml
# wait for all deployments in sample-application namespace to be READY
kubectl -n sample-application get deploy --output name | xargs -r -n1 -t kubectl -n sample-application rollout status --timeout=300s

View File

@@ -2,8 +2,7 @@ name: Jest Coverage - changed files
on:
pull_request:
branches:
- main
branches: develop
jobs:
build:
@@ -12,7 +11,7 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
with:
ref: "refs/heads/main"
ref: "refs/heads/develop"
token: ${{ secrets.GITHUB_TOKEN }} # Provide the GitHub token for authentication
- name: Fetch branch
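
The jest-coverage hunks above toggle both the `pull_request` branch filter and the explicitly checked-out ref between `main` and `develop`. For reference, a minimal workflow combining the two settings could look like the sketch below; the workflow name is illustrative and the branch value is just one of the two seen above.

```yaml
# Minimal sketch (illustrative name): run on pull requests against a chosen
# base branch and check that same branch out explicitly.
name: example-coverage
on:
  pull_request:
    branches:
      - develop
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          ref: "refs/heads/develop"
          token: ${{ secrets.GITHUB_TOKEN }} # token used for authenticated fetches
```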

View File

@@ -1,36 +0,0 @@
name: prereleaser
on:
# schedule every wednesday 9:30 AM UTC (3pm IST)
schedule:
- cron: '30 9 * * 3'
# allow manual triggering of the workflow by a maintainer
workflow_dispatch:
inputs:
release_type:
description: "Type of the release"
type: choice
required: true
options:
- 'patch'
- 'minor'
- 'major'
jobs:
verify:
uses: signoz/primus.workflows/.github/workflows/github-verify.yaml@main
secrets: inherit
with:
PRIMUS_REF: main
GITHUB_TEAM_NAME: releaser
GITHUB_MEMBER_NAME: ${{ github.actor }}
signoz:
if: ${{ always() && (needs.verify.result == 'success' || github.event.name == 'schedule') }}
uses: signoz/primus.workflows/.github/workflows/releaser.yaml@main
secrets: inherit
needs: [verify]
with:
PRIMUS_REF: main
PROJECT_NAME: signoz
RELEASE_TYPE: ${{ inputs.release_type || 'minor' }}
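
The prereleaser workflow above (present on only one side of this comparison) pairs a weekly cron trigger with a manual `workflow_dispatch` choice input; on scheduled runs the `inputs` context is empty, so the `${{ inputs.release_type || 'minor' }}` expression falls back to `minor`. A minimal sketch of that pattern, with an illustrative workflow name and a plain echo step standing in for the reusable releaser call:

```yaml
# Minimal sketch (illustrative): a dispatch choice input that falls back to
# "minor" when the run comes from the cron schedule instead of a person.
name: example-prerelease
on:
  schedule:
    - cron: '30 9 * * 3' # every Wednesday 09:30 UTC
  workflow_dispatch:
    inputs:
      release_type:
        description: "Type of the release"
        type: choice
        required: true
        options: ['patch', 'minor', 'major']
jobs:
  release:
    runs-on: ubuntu-latest
    steps:
      - name: Show the effective release type
        run: echo "release_type=${{ inputs.release_type || 'minor' }}"
```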

View File

@@ -4,6 +4,7 @@ on:
push:
branches:
- main
- develop
tags:
- v*
@@ -57,17 +58,6 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Create .env file
run: |
echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' > frontend/.env
echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
- name: Setup golang
uses: actions/setup-go@v4
with:

View File

@@ -1,32 +0,0 @@
name: releaser
on:
# trigger on new latest release
release:
types: [published]
jobs:
detect:
runs-on: ubuntu-latest
outputs:
release_type: ${{ steps.find.outputs.release_type }}
steps:
- id: find
name: find
run: |
release_tag=${{ github.event.release.tag_name }}
patch_number=$(echo $release_tag | awk -F. '{print $3}')
release_type="minor"
if [[ $patch_number -ne 0 ]]; then
release_type="patch"
fi
echo "release_type=${release_type}" >> "$GITHUB_OUTPUT"
charts:
uses: signoz/primus.workflows/.github/workflows/github-trigger.yaml@main
secrets: inherit
needs: [detect]
with:
PRIMUS_REF: main
GITHUB_REPOSITORY_NAME: charts
GITHUB_EVENT_NAME: prereleaser
GITHUB_EVENT_PAYLOAD: "{\"release_type\": \"${{ needs.detect.outputs.release_type }}\"}"

View File

@@ -3,6 +3,7 @@ on:
pull_request:
branches:
- main
- develop
paths:
- 'frontend/**'
defaults:

View File

@@ -1,12 +1,12 @@
name: staging-deployment
# Trigger deployment only on push to main branch
# Trigger deployment only on push to develop branch
on:
push:
branches:
- main
- develop
jobs:
deploy:
name: Deploy latest main branch to staging
name: Deploy latest develop branch to staging
runs-on: ubuntu-latest
environment: staging
permissions:

View File

@@ -44,7 +44,7 @@ jobs:
git add .
git stash push -m "stashed on $(date --iso-8601=seconds)"
git fetch origin
git checkout main
git checkout develop
git pull
# This is added to include the scenario when a new commit in the PR is force-pushed
git branch -D ${GITHUB_BRANCH}

View File

@@ -339,7 +339,7 @@ to make SigNoz UI available at [localhost:3301](http://localhost:3301)
**5.1.1 To install the HotROD sample app:**
```bash
curl -sL https://github.com/SigNoz/signoz/raw/main/sample-apps/hotrod/hotrod-install.sh \
curl -sL https://github.com/SigNoz/signoz/raw/develop/sample-apps/hotrod/hotrod-install.sh \
| HELM_RELEASE=my-release SIGNOZ_NAMESPACE=platform bash
```
@@ -362,7 +362,7 @@ kubectl -n sample-application run strzal --image=djbingham/curl \
**5.1.4 To delete the HotROD sample app:**
```bash
curl -sL https://github.com/SigNoz/signoz/raw/main/sample-apps/hotrod/hotrod-delete.sh \
curl -sL https://github.com/SigNoz/signoz/raw/develop/sample-apps/hotrod/hotrod-delete.sh \
| HOTROD_NAMESPACE=sample-application bash
```

View File

@@ -98,12 +98,12 @@ build-query-service-static-arm64:
# Steps to build static binary of query service for all platforms
.PHONY: build-query-service-static-all
build-query-service-static-all: build-query-service-static-amd64 build-query-service-static-arm64 build-frontend-static
build-query-service-static-all: build-query-service-static-amd64 build-query-service-static-arm64
# Steps to build and push docker image of query service
.PHONY: build-query-service-amd64 build-push-query-service
# Step to build docker image of query service in amd64 (used in build pipeline)
build-query-service-amd64: build-query-service-static-amd64 build-frontend-static
build-query-service-amd64: build-query-service-static-amd64
@echo "------------------"
@echo "--> Building query-service docker image for amd64"
@echo "------------------"
@@ -190,4 +190,4 @@ check-no-ee-references:
fi
test:
go test ./pkg/...
go test ./pkg/query-service/...

View File

@@ -1,70 +0,0 @@
##################### SigNoz Configuration Defaults #####################
#
# Do not modify this file
#
##################### Instrumentation #####################
instrumentation:
logs:
level: info
enabled: false
processors:
batch:
exporter:
otlp:
endpoint: localhost:4317
traces:
enabled: false
processors:
batch:
exporter:
otlp:
endpoint: localhost:4317
metrics:
enabled: true
readers:
pull:
exporter:
prometheus:
host: "0.0.0.0"
port: 9090
##################### Web #####################
web:
# Whether to enable the web frontend
enabled: true
# The prefix to serve web on
prefix: /
# The directory containing the static build files.
directory: /etc/signoz/web
##################### Cache #####################
cache:
# specifies the caching provider to use.
provider: memory
# memory: Uses in-memory caching.
memory:
# Time-to-live for cache entries in memory. Specify the duration in ns
ttl: 60000000000
# The interval at which the cache will be cleaned up
cleanupInterval:
# redis: Uses Redis as the caching backend.
redis:
# The hostname or IP address of the Redis server.
host: localhost
# The port on which the Redis server is running. Default is usually 6379.
port: 6379
# The password for authenticating with the Redis server, if required.
password:
# The Redis database number to use
db: 0
##################### SQLStore #####################
sqlstore:
# specifies the SQLStore provider to use.
provider: sqlite
# The maximum number of open connections to the database.
max_open_conns: 100
sqlite:
# The path to the SQLite database file.
path: /var/lib/signoz/signoz.db

View File

@@ -58,7 +58,7 @@ from the HotROD application, you should see the data generated from hotrod in Si
```sh
kubectl create ns sample-application
kubectl -n sample-application apply -f https://raw.githubusercontent.com/SigNoz/signoz/main/sample-apps/hotrod/hotrod.yaml
kubectl -n sample-application apply -f https://raw.githubusercontent.com/SigNoz/signoz/develop/sample-apps/hotrod/hotrod.yaml
```
To generate load:

View File

@@ -1,4 +1,5 @@
version: "3.9"
x-clickhouse-defaults: &clickhouse-defaults
image: clickhouse/clickhouse-server:24.1.2-alpine
tty: true
@@ -15,7 +16,14 @@ x-clickhouse-defaults: &clickhouse-defaults
max-file: "3"
healthcheck:
# "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
test: ["CMD", "wget", "--spider", "-q", "0.0.0.0:8123/ping"]
test:
[
"CMD",
"wget",
"--spider",
"-q",
"0.0.0.0:8123/ping"
]
interval: 30s
timeout: 5s
retries: 3
@@ -24,12 +32,15 @@ x-clickhouse-defaults: &clickhouse-defaults
nofile:
soft: 262144
hard: 262144
x-db-depend: &db-depend
depends_on:
- clickhouse
- otel-collector-migrator
# - clickhouse-2
# - clickhouse-3
services:
zookeeper-1:
image: bitnami/zookeeper:3.7.1
@@ -46,6 +57,7 @@ services:
# - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888
- ALLOW_ANONYMOUS_LOGIN=yes
- ZOO_AUTOPURGE_INTERVAL=1
# zookeeper-2:
# image: bitnami/zookeeper:3.7.0
# hostname: zookeeper-2
@@ -77,8 +89,9 @@ services:
# - ZOO_SERVERS=zookeeper-1:2888:3888,zookeeper-2:2888:3888,0.0.0.0:2888:3888
# - ALLOW_ANONYMOUS_LOGIN=yes
# - ZOO_AUTOPURGE_INTERVAL=1
clickhouse:
!!merge <<: *clickhouse-defaults
<<: *clickhouse-defaults
hostname: clickhouse
# ports:
# - "9000:9000"
@@ -90,6 +103,7 @@ services:
- ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
# - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
- ./data/clickhouse/:/var/lib/clickhouse/
# clickhouse-2:
# <<: *clickhouse-defaults
# hostname: clickhouse-2
@@ -117,6 +131,7 @@ services:
# - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
# # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
# - ./data/clickhouse-3/:/var/lib/clickhouse/
alertmanager:
image: signoz/alertmanager:0.23.7
volumes:
@@ -129,9 +144,14 @@ services:
deploy:
restart_policy:
condition: on-failure
query-service:
image: signoz/query-service:0.68.0
command: ["-config=/root/config/prometheus.yml", "--use-logs-new-schema=true", "--use-trace-new-schema=true"]
image: signoz/query-service:0.59.0
command:
[
"-config=/root/config/prometheus.yml",
"--use-logs-new-schema=true"
]
# ports:
# - "6060:6060" # pprof port
# - "8080:8080" # query-service port
@@ -149,16 +169,24 @@ services:
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-swarm
healthcheck:
test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/health"]
test:
[
"CMD",
"wget",
"--spider",
"-q",
"localhost:8080/api/v1/health"
]
interval: 30s
timeout: 5s
retries: 3
deploy:
restart_policy:
condition: on-failure
!!merge <<: *db-depend
<<: *db-depend
frontend:
image: signoz/frontend:0.68.0
image: signoz/frontend:0.59.0
deploy:
restart_policy:
condition: on-failure
@@ -169,9 +197,15 @@ services:
- "3301:3301"
volumes:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector:
image: signoz/signoz-otel-collector:0.111.23
command: ["--config=/etc/otel-collector-config.yaml", "--manager-config=/etc/manager-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
image: signoz/signoz-otel-collector:0.111.9
command:
[
"--config=/etc/otel-collector-config.yaml",
"--manager-config=/etc/manager-config.yaml",
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
]
user: root # required for reading docker container logs
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
@@ -201,20 +235,22 @@ services:
- clickhouse
- otel-collector-migrator
- query-service
otel-collector-migrator:
image: signoz/signoz-schema-migrator:0.111.23
deploy:
restart_policy:
condition: on-failure
delay: 5s
command:
- "sync"
- "--dsn=tcp://clickhouse:9000"
- "--up="
depends_on:
- clickhouse
# - clickhouse-2
# - clickhouse-3
image: signoz/signoz-schema-migrator:0.111.9
deploy:
restart_policy:
condition: on-failure
delay: 5s
command:
- "sync"
- "--dsn=tcp://clickhouse:9000"
- "--up="
depends_on:
- clickhouse
# - clickhouse-2
# - clickhouse-3
logspout:
image: "gliderlabs/logspout:v3.2.14"
volumes:
@@ -227,15 +263,17 @@ services:
mode: global
restart_policy:
condition: on-failure
hotrod:
image: jaegertracing/example-hotrod:1.30
command: ["all"]
command: [ "all" ]
environment:
- JAEGER_ENDPOINT=http://otel-collector:14268/api/traces
logging:
options:
max-size: 50m
max-file: "3"
load-hotrod:
image: "signoz/locust:1.2.3"
hostname: load-hotrod
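
Several hunks in the compose files here differ only in how the merge key is spelled: `!!merge <<: *anchor` versus `<<: *anchor`. Both resolve to the same YAML 1.1 merge key (the explicit `!!merge` tag is what round-tripping tools such as yq tend to emit), so a parser that supports merge keys reads them identically. A minimal sketch of the anchor-plus-merge pattern, trimmed down from the service definitions above:

```yaml
# Minimal sketch of the anchor + merge-key pattern used by these compose files.
x-clickhouse-defaults: &clickhouse-defaults
  image: clickhouse/clickhouse-server:24.1.2-alpine
  tty: true
  logging:
    options:
      max-file: "3"

services:
  clickhouse:
    # "<<:" merges every key of the anchored mapping into this one;
    # "!!merge <<:" is the same merge key with its implicit tag written out.
    <<: *clickhouse-defaults
    hostname: clickhouse
```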

View File

@@ -110,7 +110,6 @@ exporters:
clickhousetraces:
datasource: tcp://clickhouse:9000/signoz_traces
low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING}
use_new_schema: true
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/signoz_metrics
resource_to_telemetry_conversion:

View File

@@ -1,6 +1,8 @@
version: "2.4"
include:
- test-app-docker-compose.yaml
services:
zookeeper-1:
image: bitnami/zookeeper:3.7.1
@@ -18,6 +20,7 @@ services:
# - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888
- ALLOW_ANONYMOUS_LOGIN=yes
- ZOO_AUTOPURGE_INTERVAL=1
clickhouse:
image: clickhouse/clickhouse-server:24.1.2-alpine
container_name: signoz-clickhouse
@@ -40,10 +43,18 @@ services:
max-file: "3"
healthcheck:
# "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
test: ["CMD", "wget", "--spider", "-q", "0.0.0.0:8123/ping"]
test:
[
"CMD",
"wget",
"--spider",
"-q",
"0.0.0.0:8123/ping"
]
interval: 30s
timeout: 5s
retries: 3
alertmanager:
container_name: signoz-alertmanager
image: signoz/alertmanager:0.23.7
@@ -56,25 +67,31 @@ services:
command:
- --queryService.url=http://query-service:8085
- --storage.path=/data
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.111.23}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.111.9}
container_name: otel-migrator
command:
- "sync"
- "--dsn=tcp://clickhouse:9000"
- "--up="
depends_on:
clickhouse:
condition: service_healthy
# clickhouse-2:
# condition: service_healthy
# clickhouse-3:
# condition: service_healthy
# clickhouse-2:
# condition: service_healthy
# clickhouse-3:
# condition: service_healthy
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
otel-collector:
container_name: signoz-otel-collector
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.111.23}
command: ["--config=/etc/otel-collector-config.yaml", "--manager-config=/etc/manager-config.yaml", "--copy-path=/var/tmp/collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
image: signoz/signoz-otel-collector:0.111.9
command:
[
"--config=/etc/otel-collector-config.yaml",
"--manager-config=/etc/manager-config.yaml",
"--copy-path=/var/tmp/collector-config.yaml",
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
]
# user: root # required for reading docker container logs
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
@@ -103,6 +120,7 @@ services:
condition: service_completed_successfully
query-service:
condition: service_healthy
logspout:
image: "gliderlabs/logspout:v3.2.14"
container_name: signoz-logspout

View File

@@ -25,8 +25,7 @@ services:
command:
[
"-config=/root/config/prometheus.yml",
"--use-logs-new-schema=true",
"--use-trace-new-schema=true"
"--use-logs-new-schema=true"
]
ports:
- "6060:6060"

View File

@@ -13,7 +13,14 @@ x-clickhouse-defaults: &clickhouse-defaults
max-file: "3"
healthcheck:
# "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
test: ["CMD", "wget", "--spider", "-q", "0.0.0.0:8123/ping"]
test:
[
"CMD",
"wget",
"--spider",
"-q",
"0.0.0.0:8123/ping"
]
interval: 30s
timeout: 5s
retries: 3
@@ -22,17 +29,20 @@ x-clickhouse-defaults: &clickhouse-defaults
nofile:
soft: 262144
hard: 262144
x-db-depend: &db-depend
depends_on:
clickhouse:
condition: service_healthy
otel-collector-migrator-sync:
condition: service_completed_successfully
# clickhouse-2:
# condition: service_healthy
# clickhouse-3:
# condition: service_healthy
# clickhouse-2:
# condition: service_healthy
# clickhouse-3:
# condition: service_healthy
services:
zookeeper-1:
image: bitnami/zookeeper:3.7.1
container_name: signoz-zookeeper-1
@@ -49,6 +59,7 @@ services:
# - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888
- ALLOW_ANONYMOUS_LOGIN=yes
- ZOO_AUTOPURGE_INTERVAL=1
# zookeeper-2:
# image: bitnami/zookeeper:3.7.0
# container_name: signoz-zookeeper-2
@@ -82,8 +93,9 @@ services:
# - ZOO_SERVERS=zookeeper-1:2888:3888,zookeeper-2:2888:3888,0.0.0.0:2888:3888
# - ALLOW_ANONYMOUS_LOGIN=yes
# - ZOO_AUTOPURGE_INTERVAL=1
clickhouse:
!!merge <<: *clickhouse-defaults
<<: *clickhouse-defaults
container_name: signoz-clickhouse
hostname: clickhouse
ports:
@@ -98,6 +110,7 @@ services:
# - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
- ./data/clickhouse/:/var/lib/clickhouse/
- ./user_scripts:/var/lib/clickhouse/user_scripts/
# clickhouse-2:
# <<: *clickhouse-defaults
# container_name: signoz-clickhouse-2
@@ -115,6 +128,7 @@ services:
# - ./data/clickhouse-2/:/var/lib/clickhouse/
# - ./user_scripts:/var/lib/clickhouse/user_scripts/
# clickhouse-3:
# <<: *clickhouse-defaults
# container_name: signoz-clickhouse-3
@@ -131,6 +145,7 @@ services:
# # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
# - ./data/clickhouse-3/:/var/lib/clickhouse/
# - ./user_scripts:/var/lib/clickhouse/user_scripts/
alertmanager:
image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.7}
container_name: signoz-alertmanager
@@ -143,11 +158,17 @@ services:
command:
- --queryService.url=http://query-service:8085
- --storage.path=/data
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
query-service:
image: signoz/query-service:${DOCKER_TAG:-0.68.0}
image: signoz/query-service:${DOCKER_TAG:-0.59.0}
container_name: signoz-query-service
command: ["-config=/root/config/prometheus.yml", "--use-logs-new-schema=true", "--use-trace-new-schema=true"]
command:
[
"-config=/root/config/prometheus.yml",
"--use-logs-new-schema=true"
]
# ports:
# - "6060:6060" # pprof port
# - "8080:8080" # query-service port
@@ -166,13 +187,21 @@ services:
- DEPLOYMENT_TYPE=docker-standalone-amd
restart: on-failure
healthcheck:
test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/health"]
test:
[
"CMD",
"wget",
"--spider",
"-q",
"localhost:8080/api/v1/health"
]
interval: 30s
timeout: 5s
retries: 3
!!merge <<: *db-depend
<<: *db-depend
frontend:
image: signoz/frontend:${DOCKER_TAG:-0.68.0}
image: signoz/frontend:${DOCKER_TAG:-0.59.0}
container_name: signoz-frontend
restart: on-failure
depends_on:
@@ -182,8 +211,9 @@ services:
- "3301:3301"
volumes:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector-migrator-sync:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.111.23}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.111.9}
container_name: otel-migrator-sync
command:
- "sync"
@@ -192,12 +222,13 @@ services:
depends_on:
clickhouse:
condition: service_healthy
# clickhouse-2:
# condition: service_healthy
# clickhouse-3:
# condition: service_healthy
# clickhouse-2:
# condition: service_healthy
# clickhouse-3:
# condition: service_healthy
otel-collector-migrator-async:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.111.23}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.111.9}
container_name: otel-migrator-async
command:
- "async"
@@ -208,14 +239,21 @@ services:
condition: service_healthy
otel-collector-migrator-sync:
condition: service_completed_successfully
# clickhouse-2:
# condition: service_healthy
# clickhouse-3:
# condition: service_healthy
# clickhouse-2:
# condition: service_healthy
# clickhouse-3:
# condition: service_healthy
otel-collector:
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.111.23}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.111.9}
container_name: signoz-otel-collector
command: ["--config=/etc/otel-collector-config.yaml", "--manager-config=/etc/manager-config.yaml", "--copy-path=/var/tmp/collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
command:
[
"--config=/etc/otel-collector-config.yaml",
"--manager-config=/etc/manager-config.yaml",
"--copy-path=/var/tmp/collector-config.yaml",
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
]
user: root # required for reading docker container logs
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
@@ -245,6 +283,7 @@ services:
condition: service_completed_successfully
query-service:
condition: service_healthy
logspout:
image: "gliderlabs/logspout:v3.2.14"
container_name: signoz-logspout
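
The standalone compose hunks above gate startup ordering with the long form of `depends_on`: the query service waits for the ClickHouse healthcheck to pass and for the one-shot schema-migrator container to exit successfully. A condensed sketch of that wiring, reusing service names and image tags that appear in the hunks above (not the complete file):

```yaml
# Condensed sketch of the startup ordering used above (not the full file).
services:
  clickhouse:
    image: clickhouse/clickhouse-server:24.1.2-alpine
    healthcheck:
      test: ["CMD", "wget", "--spider", "-q", "0.0.0.0:8123/ping"]
      interval: 30s
      timeout: 5s
      retries: 3

  otel-collector-migrator-sync:
    image: signoz/signoz-schema-migrator:0.111.9
    command: ["sync", "--dsn=tcp://clickhouse:9000", "--up="]
    depends_on:
      clickhouse:
        condition: service_healthy # wait until the ping healthcheck passes

  query-service:
    image: signoz/query-service:0.59.0
    depends_on:
      clickhouse:
        condition: service_healthy
      otel-collector-migrator-sync:
        condition: service_completed_successfully # migrator must exit with code 0
```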

View File

@@ -1,6 +1,8 @@
version: "2.4"
include:
- test-app-docker-compose.yaml
x-clickhouse-defaults: &clickhouse-defaults
restart: on-failure
# adding non-LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
@@ -16,7 +18,14 @@ x-clickhouse-defaults: &clickhouse-defaults
max-file: "3"
healthcheck:
# "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
test: ["CMD", "wget", "--spider", "-q", "0.0.0.0:8123/ping"]
test:
[
"CMD",
"wget",
"--spider",
"-q",
"0.0.0.0:8123/ping"
]
interval: 30s
timeout: 5s
retries: 3
@@ -25,17 +34,20 @@ x-clickhouse-defaults: &clickhouse-defaults
nofile:
soft: 262144
hard: 262144
x-db-depend: &db-depend
depends_on:
clickhouse:
condition: service_healthy
otel-collector-migrator:
condition: service_completed_successfully
# clickhouse-2:
# condition: service_healthy
# clickhouse-3:
# condition: service_healthy
# clickhouse-2:
# condition: service_healthy
# clickhouse-3:
# condition: service_healthy
services:
zookeeper-1:
image: bitnami/zookeeper:3.7.1
container_name: signoz-zookeeper-1
@@ -52,6 +64,7 @@ services:
# - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888
- ALLOW_ANONYMOUS_LOGIN=yes
- ZOO_AUTOPURGE_INTERVAL=1
# zookeeper-2:
# image: bitnami/zookeeper:3.7.0
# container_name: signoz-zookeeper-2
@@ -85,8 +98,9 @@ services:
# - ZOO_SERVERS=zookeeper-1:2888:3888,zookeeper-2:2888:3888,0.0.0.0:2888:3888
# - ALLOW_ANONYMOUS_LOGIN=yes
# - ZOO_AUTOPURGE_INTERVAL=1
clickhouse:
!!merge <<: *clickhouse-defaults
<<: *clickhouse-defaults
container_name: signoz-clickhouse
hostname: clickhouse
ports:
@@ -101,6 +115,7 @@ services:
# - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
- ./data/clickhouse/:/var/lib/clickhouse/
- ./user_scripts:/var/lib/clickhouse/user_scripts/
# clickhouse-2:
# <<: *clickhouse-defaults
# container_name: signoz-clickhouse-2
@@ -118,6 +133,7 @@ services:
# - ./data/clickhouse-2/:/var/lib/clickhouse/
# - ./user_scripts:/var/lib/clickhouse/user_scripts/
# clickhouse-3:
# <<: *clickhouse-defaults
# container_name: signoz-clickhouse-3
@@ -134,6 +150,7 @@ services:
# # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
# - ./data/clickhouse-3/:/var/lib/clickhouse/
# - ./user_scripts:/var/lib/clickhouse/user_scripts/
alertmanager:
image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.7}
container_name: signoz-alertmanager
@@ -146,11 +163,18 @@ services:
command:
- --queryService.url=http://query-service:8085
- --storage.path=/data
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
query-service:
image: signoz/query-service:${DOCKER_TAG:-0.68.0}
image: signoz/query-service:${DOCKER_TAG:-0.59.0}
container_name: signoz-query-service
command: ["-config=/root/config/prometheus.yml", "-gateway-url=https://api.staging.signoz.cloud", "--use-logs-new-schema=true", "--use-trace-new-schema=true"]
command:
[
"-config=/root/config/prometheus.yml",
"-gateway-url=https://api.staging.signoz.cloud",
"--use-logs-new-schema=true"
]
# ports:
# - "6060:6060" # pprof port
# - "8080:8080" # query-service port
@@ -170,13 +194,21 @@ services:
- KAFKA_SPAN_EVAL=${KAFKA_SPAN_EVAL:-false}
restart: on-failure
healthcheck:
test: ["CMD", "wget", "--spider", "-q", "localhost:8080/api/v1/health"]
test:
[
"CMD",
"wget",
"--spider",
"-q",
"localhost:8080/api/v1/health"
]
interval: 30s
timeout: 5s
retries: 3
!!merge <<: *db-depend
<<: *db-depend
frontend:
image: signoz/frontend:${DOCKER_TAG:-0.68.0}
image: signoz/frontend:${DOCKER_TAG:-0.59.0}
container_name: signoz-frontend
restart: on-failure
depends_on:
@@ -186,22 +218,31 @@ services:
- "3301:3301"
volumes:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.111.23}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.111.9}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
depends_on:
clickhouse:
condition: service_healthy
# clickhouse-2:
# condition: service_healthy
# clickhouse-3:
# condition: service_healthy
# clickhouse-2:
# condition: service_healthy
# clickhouse-3:
# condition: service_healthy
otel-collector:
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.111.23}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.111.9}
container_name: signoz-otel-collector
command: ["--config=/etc/otel-collector-config.yaml", "--manager-config=/etc/manager-config.yaml", "--copy-path=/var/tmp/collector-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName"]
command:
[
"--config=/etc/otel-collector-config.yaml",
"--manager-config=/etc/manager-config.yaml",
"--copy-path=/var/tmp/collector-config.yaml",
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
]
user: root # required for reading docker container logs
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
@@ -231,6 +272,7 @@ services:
condition: service_completed_successfully
query-service:
condition: service_healthy
logspout:
image: "gliderlabs/logspout:v3.2.14"
container_name: signoz-logspout

View File

@@ -119,7 +119,6 @@ exporters:
clickhousetraces:
datasource: tcp://clickhouse:9000/signoz_traces
low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING}
use_new_schema: true
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/signoz_metrics
resource_to_telemetry_conversion:

View File

@@ -23,9 +23,6 @@ COPY pkg/query-service/templates /root/templates
# Make query-service executable for non-root users
RUN chmod 755 /root /root/query-service
# Copy frontend
COPY frontend/build/ /etc/signoz/web/
# run the binary
ENTRYPOINT ["./query-service"]

View File

@@ -12,7 +12,6 @@ import (
"go.signoz.io/signoz/ee/query-service/license"
"go.signoz.io/signoz/ee/query-service/usage"
baseapp "go.signoz.io/signoz/pkg/query-service/app"
"go.signoz.io/signoz/pkg/query-service/app/cloudintegrations"
"go.signoz.io/signoz/pkg/query-service/app/integrations"
"go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
"go.signoz.io/signoz/pkg/query-service/cache"
@@ -35,7 +34,6 @@ type APIHandlerOptions struct {
FeatureFlags baseint.FeatureLookup
LicenseManager *license.Manager
IntegrationsController *integrations.Controller
CloudIntegrationsController *cloudintegrations.Controller
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
Cache cache.Cache
Gateway *httputil.ReverseProxy
@@ -43,6 +41,7 @@ type APIHandlerOptions struct {
FluxInterval time.Duration
UseLogsNewSchema bool
UseTraceNewSchema bool
UseLicensesV3 bool
}
type APIHandler struct {
@@ -64,12 +63,12 @@ func NewAPIHandler(opts APIHandlerOptions) (*APIHandler, error) {
RuleManager: opts.RulesManager,
FeatureFlags: opts.FeatureFlags,
IntegrationsController: opts.IntegrationsController,
CloudIntegrationsController: opts.CloudIntegrationsController,
LogsParsingPipelineController: opts.LogsParsingPipelineController,
Cache: opts.Cache,
FluxInterval: opts.FluxInterval,
UseLogsNewSchema: opts.UseLogsNewSchema,
UseTraceNewSchema: opts.UseTraceNewSchema,
UseLicensesV3: opts.UseLicensesV3,
})
if err != nil {

View File

@@ -84,6 +84,13 @@ func (ah *APIHandler) listLicenses(w http.ResponseWriter, r *http.Request) {
}
func (ah *APIHandler) applyLicense(w http.ResponseWriter, r *http.Request) {
if ah.UseLicensesV3 {
// if the licenses v3 is toggled on then do not apply license in v2 and run the validator!
// TODO: remove after migration to v3 and deprecation from zeus
zap.L().Info("early return from apply license v2 call")
render.Success(w, http.StatusOK, nil)
return
}
var l model.License
if err := json.NewDecoder(r.Body).Decode(&l); err != nil {
@@ -95,7 +102,7 @@ func (ah *APIHandler) applyLicense(w http.ResponseWriter, r *http.Request) {
RespondError(w, model.BadRequest(fmt.Errorf("license key is required")), nil)
return
}
license, apiError := ah.LM().ActivateV3(r.Context(), l.Key)
license, apiError := ah.LM().Activate(r.Context(), l.Key)
if apiError != nil {
RespondError(w, apiError, nil)
return
@@ -258,12 +265,24 @@ func convertLicenseV3ToLicenseV2(licenses []*model.LicenseV3) []model.License {
}
func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
licensesV3, apierr := ah.LM().GetLicensesV3(r.Context())
if apierr != nil {
RespondError(w, apierr, nil)
return
var licenses []model.License
if ah.UseLicensesV3 {
licensesV3, err := ah.LM().GetLicensesV3(r.Context())
if err != nil {
RespondError(w, err, nil)
return
}
licenses = convertLicenseV3ToLicenseV2(licensesV3)
} else {
_licenses, apiError := ah.LM().GetLicenses(r.Context())
if apiError != nil {
RespondError(w, apiError, nil)
return
}
licenses = _licenses
}
licenses := convertLicenseV3ToLicenseV2(licensesV3)
resp := model.Licenses{
TrialStart: -1,

View File

@@ -2,32 +2,31 @@ package api
import (
"net/http"
"go.signoz.io/signoz/ee/query-service/app/db"
"go.signoz.io/signoz/ee/query-service/model"
baseapp "go.signoz.io/signoz/pkg/query-service/app"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
if !ah.CheckFeature(basemodel.SmartTraceDetail) {
zap.L().Info("SmartTraceDetail feature is not enabled in this plan")
ah.APIHandler.SearchTraces(w, r)
return
}
searchTracesParams, err := baseapp.ParseSearchTracesParams(r)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
return
}
ah.APIHandler.SearchTraces(w, r)
return
result, err := ah.opts.DataConnector.SearchTraces(r.Context(), searchTracesParams, db.SmartTraceAlgorithm)
if ah.HandleError(w, err, http.StatusBadRequest) {
return
}
// This is commented out since this will be taken care of by the new trace API
ah.WriteJSON(w, r, result)
// if !ah.CheckFeature(basemodel.SmartTraceDetail) {
// zap.L().Info("SmartTraceDetail feature is not enabled in this plan")
// ah.APIHandler.SearchTraces(w, r)
// return
// }
// searchTracesParams, err := baseapp.ParseSearchTracesParams(r)
// if err != nil {
// RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
// return
// }
// result, err := ah.opts.DataConnector.SearchTraces(r.Context(), searchTracesParams, db.SmartTraceAlgorithm)
// if ah.HandleError(w, err, http.StatusBadRequest) {
// return
// }
// ah.WriteJSON(w, r, result)
}

View File

@@ -32,15 +32,12 @@ import (
baseauth "go.signoz.io/signoz/pkg/query-service/auth"
"go.signoz.io/signoz/pkg/query-service/migrate"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/signoz"
"go.signoz.io/signoz/pkg/web"
licensepkg "go.signoz.io/signoz/ee/query-service/license"
"go.signoz.io/signoz/ee/query-service/usage"
"go.signoz.io/signoz/pkg/query-service/agentConf"
baseapp "go.signoz.io/signoz/pkg/query-service/app"
"go.signoz.io/signoz/pkg/query-service/app/cloudintegrations"
"go.signoz.io/signoz/pkg/query-service/app/dashboards"
baseexplorer "go.signoz.io/signoz/pkg/query-service/app/explorer"
"go.signoz.io/signoz/pkg/query-service/app/integrations"
@@ -81,6 +78,7 @@ type ServerOptions struct {
GatewayUrl string
UseLogsNewSchema bool
UseTraceNewSchema bool
UseLicensesV3 bool
}
// Server runs HTTP api service
@@ -110,15 +108,26 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
}
// NewServer creates and initializes Server
func NewServer(serverOptions *ServerOptions, config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
modelDao, err := dao.InitDao(signoz.SQLStore.SQLxDB())
func NewServer(serverOptions *ServerOptions) (*Server, error) {
modelDao, err := dao.InitDao("sqlite", baseconst.RELATIONAL_DATASOURCE_PATH)
if err != nil {
return nil, err
}
baseexplorer.InitWithDB(signoz.SQLStore.SQLxDB())
preferences.InitDB(signoz.SQLStore.SQLxDB())
dashboards.InitDB(signoz.SQLStore.SQLxDB())
baseexplorer.InitWithDSN(baseconst.RELATIONAL_DATASOURCE_PATH)
if err := preferences.InitDB(baseconst.RELATIONAL_DATASOURCE_PATH); err != nil {
return nil, err
}
localDB, err := dashboards.InitDB(baseconst.RELATIONAL_DATASOURCE_PATH)
if err != nil {
return nil, err
}
localDB.SetMaxOpenConns(10)
gatewayProxy, err := gateway.NewProxy(serverOptions.GatewayUrl, gateway.RoutePrefix)
if err != nil {
@@ -126,7 +135,7 @@ func NewServer(serverOptions *ServerOptions, config signoz.Config, signoz *signo
}
// initiate license manager
lm, err := licensepkg.StartManager("sqlite", signoz.SQLStore.SQLxDB())
lm, err := licensepkg.StartManager("sqlite", localDB, serverOptions.UseLicensesV3)
if err != nil {
return nil, err
}
@@ -140,7 +149,7 @@ func NewServer(serverOptions *ServerOptions, config signoz.Config, signoz *signo
if storage == "clickhouse" {
zap.L().Info("Using ClickHouse as datastore ...")
qb := db.NewDataConnector(
signoz.SQLStore.SQLxDB(),
localDB,
serverOptions.PromConfigPath,
lm,
serverOptions.MaxIdleConns,
@@ -176,7 +185,7 @@ func NewServer(serverOptions *ServerOptions, config signoz.Config, signoz *signo
rm, err := makeRulesManager(serverOptions.PromConfigPath,
baseconst.GetAlertManagerApiPrefix(),
serverOptions.RuleRepoURL,
signoz.SQLStore.SQLxDB(),
localDB,
reader,
c,
serverOptions.DisableRules,
@@ -197,25 +206,21 @@ func NewServer(serverOptions *ServerOptions, config signoz.Config, signoz *signo
}()
// initiate opamp
_ = opAmpModel.InitDB(signoz.SQLStore.SQLxDB())
_, err = opAmpModel.InitDB(localDB)
if err != nil {
return nil, err
}
integrationsController, err := integrations.NewController(signoz.SQLStore.SQLxDB())
integrationsController, err := integrations.NewController(localDB)
if err != nil {
return nil, fmt.Errorf(
"couldn't create integrations controller: %w", err,
)
}
cloudIntegrationsController, err := cloudintegrations.NewController(signoz.SQLStore.SQLxDB())
if err != nil {
return nil, fmt.Errorf(
"couldn't create cloud provider integrations controller: %w", err,
)
}
// ingestion pipelines manager
logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(
signoz.SQLStore.SQLxDB(), integrationsController.GetPipelinesForInstalledIntegrations,
localDB, "sqlite", integrationsController.GetPipelinesForInstalledIntegrations,
)
if err != nil {
return nil, err
@@ -223,7 +228,8 @@ func NewServer(serverOptions *ServerOptions, config signoz.Config, signoz *signo
// initiate agent config handler
agentConfMgr, err := agentConf.Initiate(&agentConf.ManagerOptions{
DB: signoz.SQLStore.SQLxDB(),
DB: localDB,
DBEngine: AppDbEngine,
AgentFeatures: []agentConf.AgentFeature{logParsingPipelineController},
})
if err != nil {
@@ -262,13 +268,13 @@ func NewServer(serverOptions *ServerOptions, config signoz.Config, signoz *signo
FeatureFlags: lm,
LicenseManager: lm,
IntegrationsController: integrationsController,
CloudIntegrationsController: cloudIntegrationsController,
LogsParsingPipelineController: logParsingPipelineController,
Cache: c,
FluxInterval: fluxInterval,
Gateway: gatewayProxy,
UseLogsNewSchema: serverOptions.UseLogsNewSchema,
UseTraceNewSchema: serverOptions.UseTraceNewSchema,
UseLicensesV3: serverOptions.UseLicensesV3,
}
apiHandler, err := api.NewAPIHandler(apiOpts)
@@ -285,7 +291,7 @@ func NewServer(serverOptions *ServerOptions, config signoz.Config, signoz *signo
usageManager: usageManager,
}
httpServer, err := s.createPublicServer(apiHandler, signoz.Web)
httpServer, err := s.createPublicServer(apiHandler)
if err != nil {
return nil, err
@@ -334,7 +340,7 @@ func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server,
}, nil
}
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, error) {
r := baseapp.NewRouter()
@@ -362,7 +368,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterRoutes(r, am)
apiHandler.RegisterLogsRoutes(r, am)
apiHandler.RegisterIntegrationRoutes(r, am)
apiHandler.RegisterCloudIntegrationsRoutes(r, am)
apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterInfraMetricsRoutes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am)
@@ -379,11 +384,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
handler = handlers.CompressHandler(handler)
err := web.AddToRouter(r)
if err != nil {
return nil, err
}
return &http.Server{
Handler: handler,
}, nil
@@ -493,29 +493,32 @@ func extractQueryRangeData(path string, r *http.Request) (map[string]interface{}
zap.L().Error("error while matching the trace explorer: ", zap.Error(err))
}
queryInfoResult := telemetry.GetInstance().CheckQueryInfo(postData)
signozMetricsUsed := false
signozLogsUsed := false
signozTracesUsed := false
if postData != nil {
if (queryInfoResult.MetricsUsed || queryInfoResult.LogsUsed || queryInfoResult.TracesUsed) && (queryInfoResult.FilterApplied) {
if queryInfoResult.MetricsUsed {
if postData.CompositeQuery != nil {
data["queryType"] = postData.CompositeQuery.QueryType
data["panelType"] = postData.CompositeQuery.PanelType
signozLogsUsed, signozMetricsUsed, signozTracesUsed = telemetry.GetInstance().CheckSigNozSignals(postData)
}
}
if signozMetricsUsed || signozLogsUsed || signozTracesUsed {
if signozMetricsUsed {
telemetry.GetInstance().AddActiveMetricsUser()
}
if queryInfoResult.LogsUsed {
if signozLogsUsed {
telemetry.GetInstance().AddActiveLogsUser()
}
if queryInfoResult.TracesUsed {
if signozTracesUsed {
telemetry.GetInstance().AddActiveTracesUser()
}
data["metricsUsed"] = queryInfoResult.MetricsUsed
data["logsUsed"] = queryInfoResult.LogsUsed
data["tracesUsed"] = queryInfoResult.TracesUsed
data["filterApplied"] = queryInfoResult.FilterApplied
data["groupByApplied"] = queryInfoResult.GroupByApplied
data["aggregateOperator"] = queryInfoResult.AggregateOperator
data["aggregateAttributeKey"] = queryInfoResult.AggregateAttributeKey
data["numberOfQueries"] = queryInfoResult.NumberOfQueries
data["queryType"] = queryInfoResult.QueryType
data["panelType"] = queryInfoResult.PanelType
data["metricsUsed"] = signozMetricsUsed
data["logsUsed"] = signozLogsUsed
data["tracesUsed"] = signozTracesUsed
userEmail, err := baseauth.GetEmailFromJwt(r.Context())
if err == nil {
// switch case to set data["screen"] based on the referrer

View File

@@ -1,11 +1,18 @@
package dao
import (
"github.com/jmoiron/sqlx"
"fmt"
"go.signoz.io/signoz/ee/query-service/dao/sqlite"
)
func InitDao(inputDB *sqlx.DB) (ModelDao, error) {
return sqlite.InitDB(inputDB)
func InitDao(engine, path string) (ModelDao, error) {
switch engine {
case "sqlite":
return sqlite.InitDB(path)
default:
return nil, fmt.Errorf("qsdb type: %s is not supported in query service", engine)
}
}

View File

@@ -65,8 +65,8 @@ func columnExists(db *sqlx.DB, tableName, columnName string) bool {
}
// InitDB creates and extends base model DB repository
func InitDB(inputDB *sqlx.DB) (*modelDao, error) {
dao, err := basedsql.InitDB(inputDB)
func InitDB(dataSourceName string) (*modelDao, error) {
dao, err := basedsql.InitDB(dataSourceName)
if err != nil {
return nil, err
}

View File

@@ -2,6 +2,18 @@ package signozio
type status string
type ActivationResult struct {
Status status `json:"status"`
Data *ActivationResponse `json:"data,omitempty"`
ErrorType string `json:"errorType,omitempty"`
Error string `json:"error,omitempty"`
}
type ActivationResponse struct {
ActivationId string `json:"ActivationId"`
PlanDetails string `json:"PlanDetails"`
}
type ValidateLicenseResponse struct {
Status status `json:"status"`
Data map[string]interface{} `json:"data"`

View File

@@ -10,6 +10,7 @@ import (
"time"
"github.com/pkg/errors"
"go.uber.org/zap"
"go.signoz.io/signoz/ee/query-service/constants"
"go.signoz.io/signoz/ee/query-service/model"
@@ -38,6 +39,86 @@ func init() {
C = New()
}
// ActivateLicense sends key to license.signoz.io and gets activation data
func ActivateLicense(key, siteId string) (*ActivationResponse, *model.ApiError) {
licenseReq := map[string]string{
"key": key,
"siteId": siteId,
}
reqString, _ := json.Marshal(licenseReq)
httpResponse, err := http.Post(C.Prefix+"/licenses/activate", APPLICATION_JSON, bytes.NewBuffer(reqString))
if err != nil {
zap.L().Error("failed to connect to license.signoz.io", zap.Error(err))
return nil, model.BadRequest(fmt.Errorf("unable to connect with license.signoz.io, please check your network connection"))
}
httpBody, err := io.ReadAll(httpResponse.Body)
if err != nil {
zap.L().Error("failed to read activation response from license.signoz.io", zap.Error(err))
return nil, model.BadRequest(fmt.Errorf("failed to read activation response from license.signoz.io"))
}
defer httpResponse.Body.Close()
// read api request result
result := ActivationResult{}
err = json.Unmarshal(httpBody, &result)
if err != nil {
zap.L().Error("failed to marshal activation response from license.signoz.io", zap.Error(err))
return nil, model.InternalError(errors.Wrap(err, "failed to marshal license activation response"))
}
switch httpResponse.StatusCode {
case 200, 201:
return result.Data, nil
case 400, 401:
return nil, model.BadRequest(fmt.Errorf(fmt.Sprintf("failed to activate: %s", result.Error)))
default:
return nil, model.InternalError(fmt.Errorf(fmt.Sprintf("failed to activate: %s", result.Error)))
}
}
// ValidateLicense validates the license key
func ValidateLicense(activationId string) (*ActivationResponse, *model.ApiError) {
validReq := map[string]string{
"activationId": activationId,
}
reqString, _ := json.Marshal(validReq)
response, err := http.Post(C.Prefix+"/licenses/validate", APPLICATION_JSON, bytes.NewBuffer(reqString))
if err != nil {
return nil, model.BadRequest(errors.Wrap(err, "unable to connect with license.signoz.io, please check your network connection"))
}
body, err := io.ReadAll(response.Body)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err, "failed to read validation response from license.signoz.io"))
}
defer response.Body.Close()
switch response.StatusCode {
case 200, 201:
a := ActivationResult{}
err = json.Unmarshal(body, &a)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err, "failed to marshal license validation response"))
}
return a.Data, nil
case 400, 401:
return nil, model.BadRequest(errors.Wrap(fmt.Errorf(string(body)),
"bad request error received from license.signoz.io"))
default:
return nil, model.InternalError(errors.Wrap(fmt.Errorf(string(body)),
"internal error received from license.signoz.io"))
}
}
func ValidateLicenseV3(licenseKey string) (*model.LicenseV3, *model.ApiError) {
// Creating an HTTP client with a timeout for better control

View File

@@ -18,13 +18,15 @@ import (
// Repo is license repo. stores license keys in a secured DB
type Repo struct {
db *sqlx.DB
db *sqlx.DB
useLicensesV3 bool
}
// NewLicenseRepo initiates a new license repo
func NewLicenseRepo(db *sqlx.DB) Repo {
func NewLicenseRepo(db *sqlx.DB, useLicensesV3 bool) Repo {
return Repo{
db: db,
db: db,
useLicensesV3: useLicensesV3,
}
}
@@ -110,16 +112,26 @@ func (r *Repo) GetActiveLicenseV2(ctx context.Context) (*model.License, *basemod
// GetActiveLicense fetches the latest active license from DB.
// If the license is not present, expect a nil license and a nil error in the output.
func (r *Repo) GetActiveLicense(ctx context.Context) (*model.License, *basemodel.ApiError) {
activeLicenseV3, err := r.GetActiveLicenseV3(ctx)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("failed to get active licenses from db: %v", err))
if r.useLicensesV3 {
zap.L().Info("Using licenses v3 for GetActiveLicense")
activeLicenseV3, err := r.GetActiveLicenseV3(ctx)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("failed to get active licenses from db: %v", err))
}
if activeLicenseV3 == nil {
return nil, nil
}
activeLicenseV2 := model.ConvertLicenseV3ToLicenseV2(activeLicenseV3)
return activeLicenseV2, nil
}
if activeLicenseV3 == nil {
return nil, nil
active, err := r.GetActiveLicenseV2(ctx)
if err != nil {
return nil, err
}
activeLicenseV2 := model.ConvertLicenseV3ToLicenseV2(activeLicenseV3)
return activeLicenseV2, nil
return active, nil
}
func (r *Repo) GetActiveLicenseV3(ctx context.Context) (*model.LicenseV3, error) {

View File

@@ -51,12 +51,12 @@ type Manager struct {
activeFeatures basemodel.FeatureSet
}
func StartManager(dbType string, db *sqlx.DB, features ...basemodel.Feature) (*Manager, error) {
func StartManager(dbType string, db *sqlx.DB, useLicensesV3 bool, features ...basemodel.Feature) (*Manager, error) {
if LM != nil {
return LM, nil
}
repo := NewLicenseRepo(db)
repo := NewLicenseRepo(db, useLicensesV3)
err := repo.InitDB(dbType)
if err != nil {
@@ -67,7 +67,32 @@ func StartManager(dbType string, db *sqlx.DB, features ...basemodel.Feature) (*M
repo: &repo,
}
if err := m.start(features...); err != nil {
if useLicensesV3 {
// get active license from the db
active, err := m.repo.GetActiveLicenseV2(context.Background())
if err != nil {
return m, err
}
// if we have an active license then need to fetch the complete details
if active != nil {
// fetch the new license structure from control plane
licenseV3, apiError := validate.ValidateLicenseV3(active.Key)
if apiError != nil {
return m, apiError
}
// insert the licenseV3 in sqlite db
apiError = m.repo.InsertLicenseV3(context.Background(), licenseV3)
// if the license already exists move ahead.
if apiError != nil && apiError.Typ != model.ErrorConflict {
return m, apiError
}
zap.L().Info("Successfully inserted license from v2 to v3 table")
}
}
if err := m.start(useLicensesV3, features...); err != nil {
return m, err
}
LM = m
@@ -75,8 +100,16 @@ func StartManager(dbType string, db *sqlx.DB, features ...basemodel.Feature) (*M
}
// start loads active license in memory and initiates validator
func (lm *Manager) start(features ...basemodel.Feature) error {
return lm.LoadActiveLicenseV3(features...)
func (lm *Manager) start(useLicensesV3 bool, features ...basemodel.Feature) error {
var err error
if useLicensesV3 {
err = lm.LoadActiveLicenseV3(features...)
} else {
err = lm.LoadActiveLicense(features...)
}
return err
}
func (lm *Manager) Stop() {
@@ -84,6 +117,31 @@ func (lm *Manager) Stop() {
<-lm.terminated
}
func (lm *Manager) SetActive(l *model.License, features ...basemodel.Feature) {
lm.mutex.Lock()
defer lm.mutex.Unlock()
if l == nil {
return
}
lm.activeLicense = l
lm.activeFeatures = append(l.FeatureSet, features...)
// set default features
setDefaultFeatures(lm)
err := lm.InitFeatures(lm.activeFeatures)
if err != nil {
zap.L().Panic("Couldn't activate features", zap.Error(err))
}
if !lm.validatorRunning {
// we want to make sure only one validator runs,
// we already have lock() so good to go
lm.validatorRunning = true
go lm.Validator(context.Background())
}
}
func (lm *Manager) SetActiveV3(l *model.LicenseV3, features ...basemodel.Feature) {
lm.mutex.Lock()
defer lm.mutex.Unlock()
@@ -114,6 +172,29 @@ func setDefaultFeatures(lm *Manager) {
lm.activeFeatures = append(lm.activeFeatures, baseconstants.DEFAULT_FEATURE_SET...)
}
// LoadActiveLicense loads the most recent active license
func (lm *Manager) LoadActiveLicense(features ...basemodel.Feature) error {
active, err := lm.repo.GetActiveLicense(context.Background())
if err != nil {
return err
}
if active != nil {
lm.SetActive(active, features...)
} else {
zap.L().Info("No active license found, defaulting to basic plan")
// if no active license is found, we default to basic(free) plan with all default features
lm.activeFeatures = model.BasicPlan
setDefaultFeatures(lm)
err := lm.InitFeatures(lm.activeFeatures)
if err != nil {
zap.L().Error("Couldn't initialize features", zap.Error(err))
return err
}
}
return nil
}
func (lm *Manager) LoadActiveLicenseV3(features ...basemodel.Feature) error {
active, err := lm.repo.GetActiveLicenseV3(context.Background())
if err != nil {
@@ -184,6 +265,31 @@ func (lm *Manager) GetLicensesV3(ctx context.Context) (response []*model.License
return response, nil
}
// Validator validates license after an epoch of time
func (lm *Manager) Validator(ctx context.Context) {
zap.L().Info("Validator started!")
defer close(lm.terminated)
tick := time.NewTicker(validationFrequency)
defer tick.Stop()
lm.Validate(ctx)
for {
select {
case <-lm.done:
return
default:
select {
case <-lm.done:
return
case <-tick.C:
lm.Validate(ctx)
}
}
}
}
// ValidatorV3 validates license after an epoch of time
func (lm *Manager) ValidatorV3(ctx context.Context) {
zap.L().Info("ValidatorV3 started!")
@@ -209,6 +315,73 @@ func (lm *Manager) ValidatorV3(ctx context.Context) {
}
}
// Validate validates the current active license
func (lm *Manager) Validate(ctx context.Context) (reterr error) {
zap.L().Info("License validation started")
if lm.activeLicense == nil {
return nil
}
defer func() {
lm.mutex.Lock()
lm.lastValidated = time.Now().Unix()
if reterr != nil {
zap.L().Error("License validation completed with error", zap.Error(reterr))
atomic.AddUint64(&lm.failedAttempts, 1)
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_CHECK_FAILED,
map[string]interface{}{"err": reterr.Error()}, "", true, false)
} else {
zap.L().Info("License validation completed with no errors")
}
lm.mutex.Unlock()
}()
response, apiError := validate.ValidateLicense(lm.activeLicense.ActivationId)
if apiError != nil {
zap.L().Error("failed to validate license", zap.Error(apiError.Err))
return apiError.Err
}
if response.PlanDetails == lm.activeLicense.PlanDetails {
// license plan hasn't changed, nothing to do
return nil
}
if response.PlanDetails != "" {
// copy and replace the active license record
l := model.License{
Key: lm.activeLicense.Key,
CreatedAt: lm.activeLicense.CreatedAt,
PlanDetails: response.PlanDetails,
ValidationMessage: lm.activeLicense.ValidationMessage,
ActivationId: lm.activeLicense.ActivationId,
}
if err := l.ParsePlan(); err != nil {
zap.L().Error("failed to parse updated license", zap.Error(err))
return err
}
// updated plan is parsable, check if plan has changed
if lm.activeLicense.PlanDetails != response.PlanDetails {
err := lm.repo.UpdatePlanDetails(ctx, lm.activeLicense.Key, response.PlanDetails)
if err != nil {
// unexpected db write issue but we can let the user continue
// and wait for update to work in next cycle.
zap.L().Error("failed to validate license", zap.Error(err))
}
}
// activate the update license plan
lm.SetActive(&l)
}
return nil
}
func (lm *Manager) RefreshLicense(ctx context.Context) *model.ApiError {
license, apiError := validate.ValidateLicenseV3(lm.activeLicenseV3.Key)
@@ -256,6 +429,50 @@ func (lm *Manager) ValidateV3(ctx context.Context) (reterr error) {
return nil
}
// Activate activates a license key with signoz server
func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *model.License, errResponse *model.ApiError) {
defer func() {
if errResponse != nil {
userEmail, err := auth.GetEmailFromJwt(ctx)
if err == nil {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED,
map[string]interface{}{"err": errResponse.Err.Error()}, userEmail, true, false)
}
}
}()
response, apiError := validate.ActivateLicense(key, "")
if apiError != nil {
zap.L().Error("failed to activate license", zap.Error(apiError.Err))
return nil, apiError
}
l := &model.License{
Key: key,
ActivationId: response.ActivationId,
PlanDetails: response.PlanDetails,
}
// parse validity and features from the plan details
err := l.ParsePlan()
if err != nil {
zap.L().Error("failed to activate license", zap.Error(err))
return nil, model.InternalError(err)
}
// store the license before activating it
err = lm.repo.InsertLicense(ctx, l)
if err != nil {
zap.L().Error("failed to activate license", zap.Error(err))
return nil, model.InternalError(err)
}
// license is valid, activate it
lm.SetActive(l)
return l, nil
}
func (lm *Manager) ActivateV3(ctx context.Context, licenseKey string) (licenseResponse *model.LicenseV3, errResponse *model.ApiError) {
defer func() {
if errResponse != nil {

View File

@@ -3,28 +3,82 @@ package main
import (
"context"
"flag"
"fmt"
"log"
"os"
"os/signal"
"strconv"
"syscall"
"time"
"go.opentelemetry.io/otel/sdk/resource"
semconv "go.opentelemetry.io/otel/semconv/v1.4.0"
"go.signoz.io/signoz/ee/query-service/app"
"go.signoz.io/signoz/pkg/config"
signozconfig "go.signoz.io/signoz/pkg/config"
"go.signoz.io/signoz/pkg/config/envprovider"
"go.signoz.io/signoz/pkg/instrumentation"
"go.signoz.io/signoz/pkg/query-service/auth"
baseconst "go.signoz.io/signoz/pkg/query-service/constants"
"go.signoz.io/signoz/pkg/query-service/migrate"
"go.signoz.io/signoz/pkg/query-service/version"
"go.signoz.io/signoz/pkg/signoz"
pkgversion "go.signoz.io/signoz/pkg/version"
"google.golang.org/grpc"
"google.golang.org/grpc/credentials/insecure"
prommodel "github.com/prometheus/common/model"
zapotlpencoder "github.com/SigNoz/zap_otlp/zap_otlp_encoder"
zapotlpsync "github.com/SigNoz/zap_otlp/zap_otlp_sync"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
)
func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger {
config := zap.NewProductionConfig()
ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
defer stop()
config.EncoderConfig.EncodeDuration = zapcore.MillisDurationEncoder
config.EncoderConfig.EncodeLevel = zapcore.CapitalLevelEncoder
config.EncoderConfig.TimeKey = "timestamp"
config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder
otlpEncoder := zapotlpencoder.NewOTLPEncoder(config.EncoderConfig)
consoleEncoder := zapcore.NewJSONEncoder(config.EncoderConfig)
defaultLogLevel := zapcore.InfoLevel
res := resource.NewWithAttributes(
semconv.SchemaURL,
semconv.ServiceNameKey.String("query-service"),
)
core := zapcore.NewTee(
zapcore.NewCore(consoleEncoder, os.Stdout, defaultLogLevel),
)
if enableQueryServiceLogOTLPExport {
ctx, cancel := context.WithTimeout(ctx, time.Second*30)
defer cancel()
conn, err := grpc.DialContext(ctx, baseconst.OTLPTarget, grpc.WithBlock(), grpc.WithTransportCredentials(insecure.NewCredentials()))
if err != nil {
log.Fatalf("failed to establish connection: %v", err)
} else {
logExportBatchSizeInt, err := strconv.Atoi(baseconst.LogExportBatchSize)
if err != nil {
logExportBatchSizeInt = 512
}
ws := zapcore.AddSync(zapotlpsync.NewOtlpSyncer(conn, zapotlpsync.Options{
BatchSize: logExportBatchSizeInt,
ResourceSchema: semconv.SchemaURL,
Resource: res,
}))
core = zapcore.NewTee(
zapcore.NewCore(consoleEncoder, os.Stdout, defaultLogLevel),
zapcore.NewCore(otlpEncoder, zapcore.NewMultiWriteSyncer(ws), defaultLogLevel),
)
}
}
logger := zap.New(core, zap.AddCaller(), zap.AddStacktrace(zapcore.ErrorLevel))
return logger
}
func init() {
prommodel.NameValidationScheme = prommodel.UTF8Validation
}
@@ -41,17 +95,19 @@ func main() {
var useLogsNewSchema bool
var useTraceNewSchema bool
var useLicensesV3 bool
var cacheConfigPath, fluxInterval string
var enableQueryServiceLogOTLPExport bool
var preferSpanMetrics bool
var maxIdleConns int
var maxOpenConns int
var dialTimeout time.Duration
var gatewayUrl string
var useLicensesV3 bool
flag.BoolVar(&useLogsNewSchema, "use-logs-new-schema", false, "use logs_v2 schema for logs")
flag.BoolVar(&useTraceNewSchema, "use-trace-new-schema", false, "use new schema for traces")
flag.BoolVar(&useLicensesV3, "use-licenses-v3", false, "use licenses_v3 schema for licenses")
flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
@@ -62,38 +118,19 @@ func main() {
flag.StringVar(&ruleRepoURL, "rules.repo-url", baseconst.AlertHelpPage, "(host address used to build rule link in alert messages)")
flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)")
flag.StringVar(&fluxInterval, "flux-interval", "5m", "(the interval to exclude data from being cached to avoid incorrect cache for data in motion)")
flag.BoolVar(&enableQueryServiceLogOTLPExport, "enable.query.service.log.otlp.export", false, "(enable query service log otlp export)")
flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')")
flag.StringVar(&gatewayUrl, "gateway-url", "", "(url to the gateway)")
flag.BoolVar(&useLicensesV3, "use-licenses-v3", false, "use licenses_v3 schema for licenses")
flag.Parse()
loggerMgr := initZapLog(enableQueryServiceLogOTLPExport)
zap.ReplaceGlobals(loggerMgr)
defer loggerMgr.Sync() // flushes buffer, if any
version.PrintVersion()
config, err := signoz.NewConfig(context.Background(), signozconfig.ResolverConfig{
Uris: []string{"env:"},
ProviderFactories: []config.ProviderFactory{
envprovider.NewFactory(),
},
})
if err != nil {
zap.L().Fatal("Failed to create config", zap.Error(err))
}
instrumentation, err := instrumentation.New(context.Background(), pkgversion.Build{}, config.Instrumentation)
if err != nil {
fmt.Println(err, err.Error())
zap.L().Fatal("Failed to create instrumentation", zap.Error(err))
}
defer instrumentation.Stop(context.Background())
zap.ReplaceGlobals(instrumentation.Logger())
defer instrumentation.Logger().Sync() // flushes buffer, if any
signoz, err := signoz.New(context.Background(), instrumentation, config, signoz.NewProviderFactories())
if err != nil {
zap.L().Fatal("Failed to create signoz struct", zap.Error(err))
}
serverOptions := &app.ServerOptions{
HTTPHostPort: baseconst.HTTPHostPort,
PromConfigPath: promConfigPath,
@@ -111,6 +148,7 @@ func main() {
GatewayUrl: gatewayUrl,
UseLogsNewSchema: useLogsNewSchema,
UseTraceNewSchema: useTraceNewSchema,
UseLicensesV3: useLicensesV3,
}
// Read the jwt secret key
@@ -122,7 +160,13 @@ func main() {
zap.L().Info("JWT secret key set successfully.")
}
server, err := app.NewServer(serverOptions, config, signoz)
if err := migrate.Migrate(baseconst.RELATIONAL_DATASOURCE_PATH); err != nil {
zap.L().Error("Failed to migrate", zap.Error(err))
} else {
zap.L().Info("Migration successful")
}
server, err := app.NewServer(serverOptions)
if err != nil {
zap.L().Fatal("Failed to create server", zap.Error(err))
}

View File

@@ -13,3 +13,8 @@ if [ "$branch" = "main" ]; then
echo "${color_red}${bold}You can't commit directly to the main branch${reset}"
exit 1
fi
if [ "$branch" = "develop" ]; then
echo "${color_red}${bold}You can't commit directly to the develop branch${reset}"
exit 1
fi

View File

@@ -24,7 +24,7 @@ const config: Config.InitialOptions = {
'^.+\\.(js|jsx)$': 'babel-jest',
},
transformIgnorePatterns: [
'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend|axios|@signozhq/design-tokens|d3-interpolate|d3-color|api)/)',
'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend|axios|@signozhq/design-tokens|d3-interpolate|d3-color)/)',
],
setupFilesAfterEnv: ['<rootDir>jest.setup.ts'],
testPathIgnorePatterns: ['/node_modules/', '/public/'],

View File

@@ -21,7 +21,7 @@
"husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*",
"commitlint": "commitlint --edit $1",
"test": "jest --coverage",
"test:changedsince": "jest --changedSince=main --coverage --silent"
"test:changedsince": "jest --changedSince=develop --coverage --silent"
},
"engines": {
"node": ">=16.15.0"
@@ -43,7 +43,6 @@
"@sentry/react": "8.41.0",
"@sentry/webpack-plugin": "2.22.6",
"@signozhq/design-tokens": "1.1.4",
"@tanstack/react-table": "8.20.6",
"@uiw/react-md-editor": "3.23.5",
"@visx/group": "3.3.0",
"@visx/shape": "3.5.0",
@@ -154,7 +153,6 @@
"@babel/preset-typescript": "^7.21.4",
"@commitlint/cli": "^16.3.0",
"@commitlint/config-conventional": "^16.2.4",
"@faker-js/faker": "9.3.0",
"@jest/globals": "^27.5.1",
"@playwright/test": "^1.22.0",
"@testing-library/jest-dom": "5.16.5",
@@ -244,8 +242,6 @@
"xml2js": "0.5.0",
"phin": "^3.7.1",
"body-parser": "1.20.3",
"http-proxy-middleware": "3.0.3",
"cross-spawn": "7.0.5",
"cookie": "^0.7.1"
"http-proxy-middleware": "3.0.3"
}
}

View File

@@ -3,10 +3,9 @@
"alert_channels": "Alert Channels",
"organization_settings": "Organization Settings",
"ingestion_settings": "Ingestion Settings",
"api_keys": "API Keys",
"api_keys": "Access Tokens",
"my_settings": "My Settings",
"overview_metrics": "Overview Metrics",
"custom_domain_settings": "Custom Domain Settings",
"dbcall_metrics": "Database Calls",
"external_metrics": "External Calls",
"pipeline": "Pipeline",

View File

@@ -26,8 +26,7 @@
"MY_SETTINGS": "SigNoz | My Settings",
"ORG_SETTINGS": "SigNoz | Organization Settings",
"INGESTION_SETTINGS": "SigNoz | Ingestion Settings",
"API_KEYS": "SigNoz | API Keys",
"CUSTOM_DOMAIN_SETTINGS": "SigNoz | Custom Domain Settings",
"API_KEYS": "SigNoz | Access Tokens",
"SOMETHING_WENT_WRONG": "SigNoz | Something Went Wrong",
"UN_AUTHORIZED": "SigNoz | Unauthorized",
"NOT_FOUND": "SigNoz | Page Not Found",
@@ -43,6 +42,5 @@
"DEFAULT": "Open source Observability Platform | SigNoz",
"ALERT_HISTORY": "SigNoz | Alert Rule History",
"ALERT_OVERVIEW": "SigNoz | Alert Rule Overview",
"INFRASTRUCTURE_MONITORING_HOSTS": "SigNoz | Infra Monitoring",
"INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring"
"INFRASTRUCTURE_MONITORING_HOSTS": "SigNoz | Infra Monitoring"
}

View File

@@ -1,3 +1,3 @@
{
"delete_confirm_message": "Are you sure you want to delete {{keyName}} key? Deleting a key is irreversible and cannot be undone."
"delete_confirm_message": "Are you sure you want to delete {{keyName}} token? Deleting a token is irreversible and cannot be undone."
}
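For reference, the {{keyName}} placeholder in this message is filled in through react-i18next interpolation; a small sketch of that usage follows (the component and namespace names here are assumptions, not part of the diff):

import { useTranslation } from 'react-i18next';

// Hypothetical component showing how the delete_confirm_message key is interpolated.
function DeleteTokenConfirmText({ keyName }: { keyName: string }): JSX.Element {
  const { t } = useTranslation('apiKeys'); // namespace name is an assumption
  // `keyName` replaces the {{keyName}} placeholder in the message above
  return <span>{t('delete_confirm_message', { keyName })}</span>;
}

export default DeleteTokenConfirmText;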

View File

@@ -3,9 +3,7 @@
"billing": "Billing",
"manage_billing_and_costs": "Manage your billing information, invoices, and monitor costs.",
"enterprise_cloud": "Enterprise Cloud",
"teams_cloud": "Teams Cloud",
"enterprise": "Enterprise",
"teams": "Teams",
"card_details_recieved_and_billing_info": "We have received your card details, your billing will only start after the end of your free trial period.",
"upgrade_plan": "Upgrade Plan",
"manage_billing": "Manage Billing",

View File

@@ -3,10 +3,9 @@
"alert_channels": "Alert Channels",
"organization_settings": "Organization Settings",
"ingestion_settings": "Ingestion Settings",
"api_keys": "API Keys",
"api_keys": "Access Tokens",
"my_settings": "My Settings",
"overview_metrics": "Overview Metrics",
"custom_domain_settings": "Custom Domain Settings",
"dbcall_metrics": "Database Calls",
"external_metrics": "External Calls",
"pipeline": "Pipeline",

View File

@@ -32,8 +32,7 @@
"MY_SETTINGS": "SigNoz | My Settings",
"ORG_SETTINGS": "SigNoz | Organization Settings",
"INGESTION_SETTINGS": "SigNoz | Ingestion Settings",
"API_KEYS": "SigNoz | API Keys",
"CUSTOM_DOMAIN_SETTINGS": "SigNoz | Custom Domain Settings",
"API_KEYS": "SigNoz | Access Tokens",
"SOMETHING_WENT_WRONG": "SigNoz | Something Went Wrong",
"UN_AUTHORIZED": "SigNoz | Unauthorized",
"NOT_FOUND": "SigNoz | Page Not Found",
@@ -56,6 +55,5 @@
"ALERT_HISTORY": "SigNoz | Alert Rule History",
"ALERT_OVERVIEW": "SigNoz | Alert Rule Overview",
"MESSAGING_QUEUES": "SigNoz | Messaging Queues",
"INFRASTRUCTURE_MONITORING_HOSTS": "SigNoz | Infra Monitoring",
"INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring"
"INFRASTRUCTURE_MONITORING_HOSTS": "SigNoz | Infra Monitoring"
}

View File

@@ -1,17 +1,29 @@
/* eslint-disable react-hooks/exhaustive-deps */
import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import getOrgUser from 'api/user/getOrgUser';
import loginApi from 'api/user/login';
import { Logout } from 'api/utils';
import Spinner from 'components/Spinner';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
import useLicense from 'hooks/useLicense';
import { useNotifications } from 'hooks/useNotifications';
import history from 'lib/history';
import { isEmpty } from 'lodash-es';
import { isEmpty, isNull } from 'lodash-es';
import { useAppContext } from 'providers/App/App';
import { ReactChild, useCallback, useEffect, useMemo, useState } from 'react';
import { ReactChild, useEffect, useMemo, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useQuery } from 'react-query';
import { matchPath, useLocation } from 'react-router-dom';
import { useDispatch, useSelector } from 'react-redux';
import { matchPath, Redirect, useLocation } from 'react-router-dom';
import { Dispatch } from 'redux';
import { AppState } from 'store/reducers';
import { getInitialUserTokenRefreshToken } from 'store/utils';
import AppActions from 'types/actions';
import { UPDATE_USER_IS_FETCH } from 'types/actions/app';
import { LicenseState, LicenseStatus } from 'types/api/licensesV3/getActive';
import { Organization } from 'types/api/user/getOrganization';
import { USER_ROLES } from 'types/roles';
import AppReducer from 'types/reducer/app';
import { isCloudUser } from 'utils/app';
import { routePermission } from 'utils/permission';
@@ -19,30 +31,32 @@ import routes, {
LIST_LICENSES,
oldNewRoutesMapping,
oldRoutes,
ROUTES_NOT_TO_BE_OVERRIDEN,
SUPPORT_ROUTE,
} from './routes';
import afterLogin from './utils';
function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
const location = useLocation();
const { pathname } = location;
const [isLoading, setIsLoading] = useState<boolean>(true);
const {
org,
orgPreferences,
user,
role,
isUserFetching,
isUserFetchingError,
isLoggedIn: isLoggedInState,
isFetchingOrgPreferences,
licenses,
isFetchingLicenses,
activeLicenseV3,
isFetchingActiveLicenseV3,
} = useAppContext();
} = useSelector<AppState, AppReducer>((state) => state.app);
const { activeLicenseV3, isFetchingActiveLicenseV3 } = useAppContext();
const isAdmin = user.role === USER_ROLES.ADMIN;
const mapRoutes = useMemo(
() =>
new Map(
[...routes, LIST_LICENSES, SUPPORT_ROUTE].map((e) => {
[...routes, LIST_LICENSES].map((e) => {
const currentPath = matchPath(pathname, {
path: e.path,
});
@@ -51,13 +65,52 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
),
[pathname],
);
const isOldRoute = oldRoutes.indexOf(pathname) > -1;
const currentRoute = mapRoutes.get('current');
const isOnboardingComplete = useMemo(
() =>
orgPreferences?.find(
(preference: Record<string, any>) => preference.key === 'ORG_ONBOARDING',
)?.value,
[orgPreferences],
);
const {
data: licensesData,
isFetching: isFetchingLicensesData,
} = useLicense();
const { t } = useTranslation(['common']);
const isCloudUserVal = isCloudUser();
const localStorageUserAuthToken = getInitialUserTokenRefreshToken();
const dispatch = useDispatch<Dispatch<AppActions>>();
const { notifications } = useNotifications();
const currentRoute = mapRoutes.get('current');
const isOldRoute = oldRoutes.indexOf(pathname) > -1;
const [orgData, setOrgData] = useState<Organization | undefined>(undefined);
const { data: orgUsers, isFetching: isFetchingOrgUsers } = useQuery({
const isLocalStorageLoggedIn =
getLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN) === 'true';
const navigateToLoginIfNotLoggedIn = (isLoggedIn = isLoggedInState): void => {
dispatch({
type: UPDATE_USER_IS_FETCH,
payload: {
isUserFetching: false,
},
});
if (!isLoggedIn) {
history.push(ROUTES.LOGIN, { from: pathname });
}
};
const { data: orgUsers, isLoading: isLoadingOrgUsers } = useQuery({
queryFn: () => {
if (orgData && orgData.id !== undefined) {
return getOrgUser({
@@ -67,10 +120,10 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
return undefined;
},
queryKey: ['getOrgUser'],
enabled: !isEmpty(orgData) && user.role === 'ADMIN',
enabled: !isEmpty(orgData),
});
const checkFirstTimeUser = useCallback((): boolean => {
const checkFirstTimeUser = (): boolean => {
const users = orgUsers?.payload || [];
const remainingUsers = users.filter(
@@ -78,91 +131,154 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
);
return remainingUsers.length === 1;
}, [orgUsers?.payload]);
};
useEffect(() => {
// Check whether onboarding should be shown based on the org users and the onboarding completion status; wait for org users and preferences to load first
const shouldShowOnboarding = (): boolean => {
// Only run this effect if the org users and preferences are loaded
if (!isLoadingOrgUsers && !isFetchingOrgPreferences) {
const isFirstUser = checkFirstTimeUser();
// Show onboarding only when this is the first user and onboarding has not been completed yet
return isFirstUser && !isOnboardingComplete;
}
return false;
};
const handleRedirectForOrgOnboarding = (key: string): void => {
if (
isLoggedInState &&
isCloudUserVal &&
!isFetchingOrgPreferences &&
orgPreferences &&
!isFetchingOrgUsers &&
orgUsers &&
orgUsers.payload
!isLoadingOrgUsers &&
!isEmpty(orgUsers?.payload) &&
!isNull(orgPreferences)
) {
const isOnboardingComplete = orgPreferences?.find(
(preference: Record<string, any>) => preference.key === 'ORG_ONBOARDING',
)?.value;
if (key === 'ONBOARDING' && isOnboardingComplete) {
history.push(ROUTES.APPLICATION);
}
const isFirstUser = checkFirstTimeUser();
if (
isFirstUser &&
!isOnboardingComplete &&
// redirect only if the current route is allowed to be overridden by org onboarding
!ROUTES_NOT_TO_BE_OVERRIDEN.includes(pathname)
) {
const isFirstTimeUser = checkFirstTimeUser();
if (isFirstTimeUser && !isOnboardingComplete) {
history.push(ROUTES.ONBOARDING);
}
}
}, [
checkFirstTimeUser,
isCloudUserVal,
isFetchingOrgPreferences,
isFetchingOrgUsers,
orgPreferences,
orgUsers,
pathname,
]);
if (!isCloudUserVal && key === 'ONBOARDING') {
history.push(ROUTES.APPLICATION);
}
};
const handleUserLoginIfTokenPresent = async (
key: keyof typeof ROUTES,
): Promise<void> => {
if (localStorageUserAuthToken?.refreshJwt) {
// localstorage token is present
// renew web access token
const response = await loginApi({
refreshToken: localStorageUserAuthToken?.refreshJwt,
});
if (response.statusCode === 200) {
const route = routePermission[key];
// fetch all the required resources and store them in redux
const userResponse = await afterLogin(
response.payload.userId,
response.payload.accessJwt,
response.payload.refreshJwt,
);
handleRedirectForOrgOnboarding(key);
if (
userResponse &&
route &&
route.find((e) => e === userResponse.payload.role) === undefined
) {
history.push(ROUTES.UN_AUTHORIZED);
}
} else {
Logout();
notifications.error({
message: response.error || t('something_went_wrong'),
});
}
}
};
const handlePrivateRoutes = async (
key: keyof typeof ROUTES,
): Promise<void> => {
if (
localStorageUserAuthToken &&
localStorageUserAuthToken.refreshJwt &&
isUserFetching
) {
handleUserLoginIfTokenPresent(key);
} else {
handleRedirectForOrgOnboarding(key);
navigateToLoginIfNotLoggedIn(isLocalStorageLoggedIn);
}
};
const navigateToWorkSpaceBlocked = (route: any): void => {
const { path } = route;
const isRouteEnabledForWorkspaceBlockedState =
isAdmin &&
(path === ROUTES.ORG_SETTINGS ||
path === ROUTES.BILLING ||
path === ROUTES.MY_SETTINGS);
if (
path &&
path !== ROUTES.WORKSPACE_LOCKED &&
!isRouteEnabledForWorkspaceBlockedState
) {
if (path && path !== ROUTES.WORKSPACE_LOCKED) {
history.push(ROUTES.WORKSPACE_LOCKED);
dispatch({
type: UPDATE_USER_IS_FETCH,
payload: {
isUserFetching: false,
},
});
}
};
useEffect(() => {
if (!isFetchingLicenses) {
const currentRoute = mapRoutes.get('current');
const shouldBlockWorkspace = licenses?.workSpaceBlock;
if (!isFetchingLicensesData) {
const shouldBlockWorkspace = licensesData?.payload?.workSpaceBlock;
if (shouldBlockWorkspace && currentRoute) {
if (shouldBlockWorkspace) {
navigateToWorkSpaceBlocked(currentRoute);
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [isFetchingLicenses, licenses?.workSpaceBlock, mapRoutes, pathname]);
}, [isFetchingLicensesData]);
const navigateToWorkSpaceSuspended = (route: any): void => {
const { path } = route;
if (path && path !== ROUTES.WORKSPACE_SUSPENDED) {
history.push(ROUTES.WORKSPACE_SUSPENDED);
dispatch({
type: UPDATE_USER_IS_FETCH,
payload: {
isUserFetching: false,
},
});
}
};
useEffect(() => {
if (!isFetchingActiveLicenseV3 && activeLicenseV3) {
const currentRoute = mapRoutes.get('current');
const shouldSuspendWorkspace =
activeLicenseV3.status === LicenseStatus.SUSPENDED &&
activeLicenseV3.state === LicenseState.PAYMENT_FAILED;
if (shouldSuspendWorkspace && currentRoute) {
if (shouldSuspendWorkspace) {
navigateToWorkSpaceSuspended(currentRoute);
}
}
}, [isFetchingActiveLicenseV3, activeLicenseV3, mapRoutes, pathname]);
}, [isFetchingActiveLicenseV3, activeLicenseV3]);
useEffect(() => {
if (org && org.length > 0 && org[0].id !== undefined) {
@@ -170,70 +286,103 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
}
}, [org]);
const handleRouting = (): void => {
const showOrgOnboarding = shouldShowOnboarding();
if (showOrgOnboarding && !isOnboardingComplete && isCloudUserVal) {
history.push(ROUTES.ONBOARDING);
} else {
history.push(ROUTES.APPLICATION);
}
};
useEffect(() => {
const { isPrivate } = currentRoute || {
isPrivate: false,
};
if (isLoggedInState && role && role !== 'ADMIN') {
setIsLoading(false);
}
if (!isPrivate) {
setIsLoading(false);
}
if (
!isEmpty(user) &&
!isFetchingOrgPreferences &&
!isEmpty(orgUsers?.payload) &&
!isNull(orgPreferences)
) {
setIsLoading(false);
}
}, [currentRoute, user, role, orgUsers, orgPreferences]);
// eslint-disable-next-line sonarjs/cognitive-complexity
useEffect(() => {
// if it is an old route navigate to the new route
if (isOldRoute) {
const redirectUrl = oldNewRoutesMapping[pathname];
(async (): Promise<void> => {
try {
if (isOldRoute) {
const redirectUrl = oldNewRoutesMapping[pathname];
const newLocation = {
...location,
pathname: redirectUrl,
};
history.replace(newLocation);
return;
}
// if the current route is matched, handle it based on whether it is private
if (currentRoute) {
const { isPrivate, key } = currentRoute;
if (isPrivate) {
if (isLoggedInState) {
const route = routePermission[key];
if (route && route.find((e) => e === user.role) === undefined) {
history.push(ROUTES.UN_AUTHORIZED);
const newLocation = {
...location,
pathname: redirectUrl,
};
history.replace(newLocation);
}
if (currentRoute) {
const { isPrivate, key } = currentRoute;
if (isPrivate && key !== String(ROUTES.WORKSPACE_LOCKED)) {
handlePrivateRoutes(key);
} else {
// no need to fetch the user, so mark user fetching as false
if (getLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN) === 'true') {
handleRouting();
}
dispatch({
type: UPDATE_USER_IS_FETCH,
payload: {
isUserFetching: false,
},
});
}
} else if (pathname === ROUTES.HOME_PAGE) {
// route to the application page instead of the root page
if (isLoggedInState) {
handleRouting();
} else {
navigateToLoginIfNotLoggedIn();
}
} else {
setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, pathname);
history.push(ROUTES.LOGIN);
// not found
navigateToLoginIfNotLoggedIn(isLocalStorageLoggedIn);
}
} else if (isLoggedInState) {
const fromPathname = getLocalStorageApi(
LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT,
);
if (fromPathname) {
history.push(fromPathname);
setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, '');
} else if (pathname !== ROUTES.SOMETHING_WENT_WRONG) {
history.push(ROUTES.APPLICATION);
}
} else {
// do nothing as the unauthenticated routes are LOGIN and SIGNUP and the LOGIN container takes care of routing to signup if
// setup is not completed
} catch (error) {
// something went wrong
history.push(ROUTES.SOMETHING_WENT_WRONG);
}
} else if (isLoggedInState) {
const fromPathname = getLocalStorageApi(
LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT,
);
if (fromPathname) {
history.push(fromPathname);
setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, '');
} else {
history.push(ROUTES.APPLICATION);
}
} else {
setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, pathname);
history.push(ROUTES.LOGIN);
}
})();
}, [
licenses,
dispatch,
isLoggedInState,
pathname,
user,
isOldRoute,
currentRoute,
location,
licensesData,
orgUsers,
orgPreferences,
]);
if (isUserFetchingError) {
return <Redirect to={ROUTES.SOMETHING_WENT_WRONG} />;
}
if (isUserFetching || isLoading) {
return <Spinner tip="Loading..." />;
}
// NOTE: disabling this rule as there is no need to have div
// eslint-disable-next-line react/jsx-no-useless-fragment
return <>{children}</>;

View File

@@ -1,6 +1,8 @@
import { ConfigProvider } from 'antd';
import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import logEvent from 'api/common/logEvent';
import getAllOrgPreferences from 'api/preferences/getAllOrgPreferences';
import NotFound from 'components/NotFound';
import Spinner from 'components/Spinner';
import { FeatureKeys } from 'constants/features';
@@ -9,21 +11,35 @@ import ROUTES from 'constants/routes';
import AppLayout from 'container/AppLayout';
import useAnalytics from 'hooks/analytics/useAnalytics';
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useThemeConfig } from 'hooks/useDarkMode';
import { LICENSE_PLAN_KEY } from 'hooks/useLicense';
import { useIsDarkMode, useThemeConfig } from 'hooks/useDarkMode';
import { THEME_MODE } from 'hooks/useDarkMode/constant';
import useFeatureFlags from 'hooks/useFeatureFlag';
import useGetFeatureFlag from 'hooks/useGetFeatureFlag';
import useLicense, { LICENSE_PLAN_KEY } from 'hooks/useLicense';
import { NotificationProvider } from 'hooks/useNotifications';
import { ResourceProvider } from 'hooks/useResourceAttribute';
import history from 'lib/history';
import { identity, pickBy } from 'lodash-es';
import { identity, pick, pickBy } from 'lodash-es';
import posthog from 'posthog-js';
import AlertRuleProvider from 'providers/Alert';
import { useAppContext } from 'providers/App/App';
import { IUser } from 'providers/App/types';
import { AppProvider } from 'providers/App/App';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { QueryBuilderProvider } from 'providers/QueryBuilder';
import { Suspense, useCallback, useEffect, useState } from 'react';
import { Suspense, useEffect, useState } from 'react';
import { useQuery } from 'react-query';
import { useDispatch, useSelector } from 'react-redux';
import { Route, Router, Switch } from 'react-router-dom';
import { CompatRouter } from 'react-router-dom-v5-compat';
import { Dispatch } from 'redux';
import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import {
UPDATE_FEATURE_FLAG_RESPONSE,
UPDATE_IS_FETCHING_ORG_PREFERENCES,
UPDATE_ORG_PREFERENCES,
} from 'types/actions/app';
import AppReducer, { User } from 'types/reducer/app';
import { USER_ROLES } from 'types/roles';
import { extractDomain, isCloudUser, isEECloudUser } from 'utils/app';
import PrivateRoute from './Private';
@@ -35,20 +51,14 @@ import defaultRoutes, {
function App(): JSX.Element {
const themeConfig = useThemeConfig();
const {
licenses,
user,
isFetchingUser,
isFetchingLicenses,
isFetchingFeatureFlags,
userFetchError,
licensesFetchError,
featureFlagsFetchError,
isLoggedIn: isLoggedInState,
featureFlags,
org,
} = useAppContext();
const { data: licenseData } = useLicense();
const [routes, setRoutes] = useState<AppRoutes[]>(defaultRoutes);
const { role, isLoggedIn: isLoggedInState, user, org } = useSelector<
AppState,
AppReducer
>((state) => state.app);
const dispatch = useDispatch<Dispatch<AppActions>>();
const { trackPageView } = useAnalytics();
@@ -56,114 +66,164 @@ function App(): JSX.Element {
const isCloudUserVal = isCloudUser();
const enableAnalytics = useCallback(
(user: IUser): void => {
// wait for the required data to be loaded before doing init for anything!
if (!isFetchingLicenses && licenses && org) {
const orgName =
org && Array.isArray(org) && org.length > 0 ? org[0].name : '';
const isDarkMode = useIsDarkMode();
const { name, email, role } = user;
const isChatSupportEnabled =
useFeatureFlags(FeatureKeys.CHAT_SUPPORT)?.active || false;
const identifyPayload = {
email,
name,
company_name: orgName,
role,
source: 'signoz-ui',
};
const isPremiumSupportEnabled =
useFeatureFlags(FeatureKeys.PREMIUM_SUPPORT)?.active || false;
const sanitizedIdentifyPayload = pickBy(identifyPayload, identity);
const domain = extractDomain(email);
const hostNameParts = hostname.split('.');
const { data: orgPreferences, isLoading: isLoadingOrgPreferences } = useQuery({
queryFn: () => getAllOrgPreferences(),
queryKey: ['getOrgPreferences'],
enabled: isLoggedInState && role === USER_ROLES.ADMIN,
});
const groupTraits = {
name: orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
};
window.analytics.identify(email, sanitizedIdentifyPayload);
window.analytics.group(domain, groupTraits);
posthog?.identify(email, {
email,
name,
orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
isPaidUser: !!licenses?.trialConvertedToSubscription,
});
posthog?.group('company', domain, {
name: orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
isPaidUser: !!licenses?.trialConvertedToSubscription,
});
}
},
[hostname, isFetchingLicenses, licenses, org],
);
// eslint-disable-next-line sonarjs/cognitive-complexity
useEffect(() => {
if (
!isFetchingLicenses &&
licenses &&
!isFetchingUser &&
user &&
!!user.email
) {
const isOnBasicPlan =
licenses.licenses?.some(
(license) =>
license.isCurrent && license.planKey === LICENSE_PLAN_KEY.BASIC_PLAN,
) || licenses.licenses === null;
if (orgPreferences && !isLoadingOrgPreferences) {
dispatch({
type: UPDATE_IS_FETCHING_ORG_PREFERENCES,
payload: {
isFetchingOrgPreferences: false,
},
});
const isIdentifiedUser = getLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER);
if (isLoggedInState && user && user.id && user.email && !isIdentifiedUser) {
setLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER, 'true');
}
let updatedRoutes = defaultRoutes;
// if the user is a cloud user
if (isCloudUserVal || isEECloudUser()) {
// if the user is on basic plan then remove billing
if (isOnBasicPlan) {
updatedRoutes = updatedRoutes.filter(
(route) => route?.path !== ROUTES.BILLING,
);
}
// always add support route for cloud users
updatedRoutes = [...updatedRoutes, SUPPORT_ROUTE];
} else {
// if not a cloud user then remove billing and add list licenses route
updatedRoutes = updatedRoutes.filter(
(route) => route?.path !== ROUTES.BILLING,
);
updatedRoutes = [...updatedRoutes, LIST_LICENSES];
}
setRoutes(updatedRoutes);
dispatch({
type: UPDATE_ORG_PREFERENCES,
payload: {
orgPreferences: orgPreferences.payload?.data || null,
},
});
}
}, [
isLoggedInState,
user,
licenses,
isCloudUserVal,
isFetchingLicenses,
isFetchingUser,
]);
}, [orgPreferences, dispatch, isLoadingOrgPreferences]);
useEffect(() => {
if (isLoggedInState && role !== USER_ROLES.ADMIN) {
dispatch({
type: UPDATE_IS_FETCHING_ORG_PREFERENCES,
payload: {
isFetchingOrgPreferences: false,
},
});
}
}, [isLoggedInState, role, dispatch]);
const featureResponse = useGetFeatureFlag((allFlags) => {
dispatch({
type: UPDATE_FEATURE_FLAG_RESPONSE,
payload: {
featureFlag: allFlags,
refetch: featureResponse.refetch,
},
});
const isOnboardingEnabled =
allFlags.find((flag) => flag.name === FeatureKeys.ONBOARDING)?.active ||
false;
if (!isOnboardingEnabled || !isCloudUserVal) {
const newRoutes = routes.filter(
(route) => route?.path !== ROUTES.GET_STARTED,
);
setRoutes(newRoutes);
}
});
const isOnBasicPlan =
licenseData?.payload?.licenses?.some(
(license) =>
license.isCurrent && license.planKey === LICENSE_PLAN_KEY.BASIC_PLAN,
) || licenseData?.payload?.licenses === null;
const enableAnalytics = (user: User): void => {
const orgName =
org && Array.isArray(org) && org.length > 0 ? org[0].name : '';
const { name, email } = user;
const identifyPayload = {
email,
name,
company_name: orgName,
role,
source: 'signoz-ui',
};
const sanitizedIdentifyPayload = pickBy(identifyPayload, identity);
const domain = extractDomain(email);
const hostNameParts = hostname.split('.');
const groupTraits = {
name: orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
};
window.analytics.identify(email, sanitizedIdentifyPayload);
window.analytics.group(domain, groupTraits);
posthog?.identify(email, {
email,
name,
orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
isPaidUser: !!licenseData?.payload?.trialConvertedToSubscription,
});
posthog?.group('company', domain, {
name: orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
isPaidUser: !!licenseData?.payload?.trialConvertedToSubscription,
});
};
useEffect(() => {
const isIdentifiedUser = getLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER);
if (
isLoggedInState &&
user &&
user.userId &&
user.email &&
!isIdentifiedUser
) {
setLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER, 'true');
}
if (
isOnBasicPlan ||
(isLoggedInState && role && role !== 'ADMIN') ||
!(isCloudUserVal || isEECloudUser())
) {
const newRoutes = routes.filter((route) => route?.path !== ROUTES.BILLING);
setRoutes(newRoutes);
}
if (isCloudUserVal || isEECloudUser()) {
const newRoutes = [...routes, SUPPORT_ROUTE];
setRoutes(newRoutes);
} else {
const newRoutes = [...routes, LIST_LICENSES];
setRoutes(newRoutes);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [isLoggedInState, isOnBasicPlan, user]);
useEffect(() => {
if (pathname === ROUTES.ONBOARDING) {
@@ -177,123 +237,99 @@ function App(): JSX.Element {
}
trackPageView(pathname);
}, [pathname, trackPageView]);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [pathname]);
useEffect(() => {
// feature flags should not be loading, and either featureFlags or fetchError should be set, indicating the request is complete.
// licenses should also be present; there is no loading/error check for licenses because they are mandatory, and if they are absent we route
// to something-went-wrong, which would ideally need a reload.
if (
!isFetchingFeatureFlags &&
(featureFlags || featureFlagsFetchError) &&
licenses
) {
let isChatSupportEnabled = false;
let isPremiumSupportEnabled = false;
if (featureFlags && featureFlags.length > 0) {
isChatSupportEnabled =
featureFlags.find((flag) => flag.name === FeatureKeys.CHAT_SUPPORT)
?.active || false;
const showAddCreditCardModal =
!isPremiumSupportEnabled &&
!licenseData?.payload?.trialConvertedToSubscription;
isPremiumSupportEnabled =
featureFlags.find((flag) => flag.name === FeatureKeys.PREMIUM_SUPPORT)
?.active || false;
}
const showAddCreditCardModal =
!isPremiumSupportEnabled && !licenses.trialConvertedToSubscription;
if (isLoggedInState && isChatSupportEnabled && !showAddCreditCardModal) {
window.Intercom('boot', {
app_id: process.env.INTERCOM_APP_ID,
email: user?.email || '',
name: user?.name || '',
});
}
if (isLoggedInState && isChatSupportEnabled && !showAddCreditCardModal) {
window.Intercom('boot', {
app_id: process.env.INTERCOM_APP_ID,
email: user?.email || '',
name: user?.name || '',
});
}
}, [
isLoggedInState,
isChatSupportEnabled,
user,
licenseData,
isPremiumSupportEnabled,
pathname,
licenses?.trialConvertedToSubscription,
featureFlags,
isFetchingFeatureFlags,
featureFlagsFetchError,
licenses,
]);
useEffect(() => {
if (!isFetchingUser && isCloudUserVal && user && user.email) {
if (user && user?.email && user?.userId && user?.name) {
try {
const isThemeAnalyticsSent = getLocalStorageApi(
LOCALSTORAGE.THEME_ANALYTICS_V1,
);
if (!isThemeAnalyticsSent) {
logEvent('Theme Analytics', {
theme: isDarkMode ? THEME_MODE.DARK : THEME_MODE.LIGHT,
user: pick(user, ['email', 'userId', 'name']),
org,
});
setLocalStorageApi(LOCALSTORAGE.THEME_ANALYTICS_V1, 'true');
}
} catch {
console.error('Failed to parse local storage theme analytics event');
}
}
if (isCloudUserVal && user && user.email) {
enableAnalytics(user);
}
}, [user, isFetchingUser, isCloudUserVal, enableAnalytics]);
// if the user is in a logged-in state
if (isLoggedInState) {
if (pathname === ROUTES.HOME_PAGE) {
history.replace(ROUTES.APPLICATION);
}
// if the setup calls are loading then return a spinner
if (isFetchingLicenses || isFetchingUser || isFetchingFeatureFlags) {
return <Spinner tip="Loading..." />;
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [user]);
// if any of the required calls fail then route to the something-went-wrong error page.
// this check needs to come before the missing-data check because on error the data will never be loaded and it would
// otherwise get stuck in indefinite loading
if (
(userFetchError || licensesFetchError) &&
pathname !== ROUTES.SOMETHING_WENT_WRONG
) {
history.replace(ROUTES.SOMETHING_WENT_WRONG);
}
// if all of the data is not yet set then return a spinner; this is required because there is a gap between the loading states flipping and the data being set
if (
(!licenses || !user.email || !featureFlags) &&
!userFetchError &&
!licensesFetchError
) {
return <Spinner tip="Loading..." />;
}
}
useEffect(() => {
console.info('We are hiring! https://jobs.gem.com/signoz');
}, []);
return (
<ConfigProvider theme={themeConfig}>
<Router history={history}>
<CompatRouter>
<NotificationProvider>
<PrivateRoute>
<ResourceProvider>
<QueryBuilderProvider>
<DashboardProvider>
<KeyboardHotkeysProvider>
<AlertRuleProvider>
<AppLayout>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<AppProvider>
<ConfigProvider theme={themeConfig}>
<Router history={history}>
<CompatRouter>
<NotificationProvider>
<PrivateRoute>
<ResourceProvider>
<QueryBuilderProvider>
<DashboardProvider>
<KeyboardHotkeysProvider>
<AlertRuleProvider>
<AppLayout>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</AppLayout>
</AlertRuleProvider>
</KeyboardHotkeysProvider>
</DashboardProvider>
</QueryBuilderProvider>
</ResourceProvider>
</PrivateRoute>
</NotificationProvider>
</CompatRouter>
</Router>
</ConfigProvider>
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</AppLayout>
</AlertRuleProvider>
</KeyboardHotkeysProvider>
</DashboardProvider>
</QueryBuilderProvider>
</ResourceProvider>
</PrivateRoute>
</NotificationProvider>
</CompatRouter>
</Router>
</ConfigProvider>
</AppProvider>
);
}

View File

@@ -145,11 +145,6 @@ export const MySettings = Loadable(
() => import(/* webpackChunkName: "All MySettings" */ 'pages/MySettings'),
);
export const CustomDomainSettings = Loadable(
() =>
import(/* webpackChunkName: "Custom Domain Settings" */ 'pages/Settings'),
);
export const Logs = Loadable(
() => import(/* webpackChunkName: "Logs" */ 'pages/LogsModulePage'),
);
@@ -185,7 +180,7 @@ export const PasswordReset = Loadable(
export const SomethingWentWrong = Loadable(
() =>
import(
/* webpackChunkName: "ErrorBoundaryFallback" */ 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback'
/* webpackChunkName: "SomethingWentWrong" */ 'pages/SomethingWentWrong'
),
);

View File

@@ -10,7 +10,6 @@ import {
BillingPage,
CreateAlertChannelAlerts,
CreateNewAlerts,
CustomDomainSettings,
DashboardPage,
DashboardWidget,
EditAlertChannelsAlerts,
@@ -289,13 +288,6 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'MY_SETTINGS',
},
{
path: ROUTES.CUSTOM_DOMAIN_SETTINGS,
exact: true,
component: CustomDomainSettings,
isPrivate: true,
key: 'CUSTOM_DOMAIN_SETTINGS',
},
{
path: ROUTES.LOGS,
exact: true,
@@ -415,13 +407,6 @@ const routes: AppRoutes[] = [
key: 'INFRASTRUCTURE_MONITORING_HOSTS',
isPrivate: true,
},
{
path: ROUTES.INFRASTRUCTURE_MONITORING_KUBERNETES,
exact: true,
component: InfrastructureMonitoring,
key: 'INFRASTRUCTURE_MONITORING_KUBERNETES',
isPrivate: true,
},
];
export const SUPPORT_ROUTE: AppRoutes = {
@@ -442,27 +427,24 @@ export const LIST_LICENSES: AppRoutes = {
export const oldRoutes = [
'/pipelines',
'/logs/old-logs-explorer',
'/logs-explorer',
'/logs-explorer/live',
'/logs-save-views',
'/traces-save-views',
'/settings/access-tokens',
'/settings/api-keys',
];
export const oldNewRoutesMapping: Record<string, string> = {
'/pipelines': '/logs/pipelines',
'/logs/old-logs-explorer': '/logs/old-logs-explorer',
'/logs-explorer': '/logs/logs-explorer',
'/logs-explorer/live': '/logs/logs-explorer/live',
'/logs-save-views': '/logs/saved-views',
'/traces-save-views': '/traces/saved-views',
'/settings/access-tokens': '/settings/api-keys',
'/settings/api-keys': '/settings/access-tokens',
};
export const ROUTES_NOT_TO_BE_OVERRIDEN: string[] = [
ROUTES.WORKSPACE_LOCKED,
ROUTES.WORKSPACE_SUSPENDED,
];
export interface AppRoutes {
component: RouteProps['component'];
path: RouteProps['path'];

View File

@@ -1,28 +1,92 @@
import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import getUserApi from 'api/user/getUser';
import { Logout } from 'api/utils';
import { LOCALSTORAGE } from 'constants/localStorage';
import store from 'store';
import AppActions from 'types/actions';
import {
LOGGED_IN,
UPDATE_USER,
UPDATE_USER_ACCESS_REFRESH_ACCESS_TOKEN,
UPDATE_USER_IS_FETCH,
} from 'types/actions/app';
import { SuccessResponse } from 'types/api';
import { PayloadProps } from 'types/api/user/getUser';
const afterLogin = (
const afterLogin = async (
userId: string,
authToken: string,
refreshToken: string,
interceptorRejected?: boolean,
): void => {
): Promise<SuccessResponse<PayloadProps> | undefined> => {
setLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN, authToken);
setLocalStorageApi(LOCALSTORAGE.REFRESH_AUTH_TOKEN, refreshToken);
setLocalStorageApi(LOCALSTORAGE.USER_ID, userId);
setLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN, 'true');
if (!interceptorRejected) {
window.dispatchEvent(
new CustomEvent('AFTER_LOGIN', {
detail: {
accessJWT: authToken,
refreshJWT: refreshToken,
id: userId,
},
}),
);
store.dispatch<AppActions>({
type: UPDATE_USER_ACCESS_REFRESH_ACCESS_TOKEN,
payload: {
accessJwt: authToken,
refreshJwt: refreshToken,
},
});
const [getUserResponse] = await Promise.all([
getUserApi({
userId,
token: authToken,
}),
]);
if (getUserResponse.statusCode === 200 && getUserResponse.payload) {
store.dispatch<AppActions>({
type: LOGGED_IN,
payload: {
isLoggedIn: true,
},
});
const { payload } = getUserResponse;
store.dispatch<AppActions>({
type: UPDATE_USER,
payload: {
ROLE: payload.role,
email: payload.email,
name: payload.name,
orgName: payload.organization,
profilePictureURL: payload.profilePictureURL,
userId: payload.id,
orgId: payload.orgId,
userFlags: payload.flags,
},
});
const isLoggedInLocalStorage = getLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN);
if (isLoggedInLocalStorage === null) {
setLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN, 'true');
}
store.dispatch({
type: UPDATE_USER_IS_FETCH,
payload: {
isUserFetching: false,
},
});
return getUserResponse;
}
store.dispatch({
type: UPDATE_USER_IS_FETCH,
payload: {
isUserFetching: false,
},
});
Logout();
return undefined;
};
export default afterLogin;
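Taken together with the Private.tsx changes above, the reworked async afterLogin is driven roughly like this (a sketch only, with error handling trimmed; the calls mirror those visible in the diff):

import loginApi from 'api/user/login';
import afterLogin from 'AppRoutes/utils';

// Illustrative sketch: renew the session from a stored refresh token and hydrate user state.
async function renewSession(refreshToken: string): Promise<void> {
  const response = await loginApi({ refreshToken });
  if (response.statusCode !== 200) {
    return;
  }
  // afterLogin is now async and resolves with the /user/:id payload, or undefined on failure
  const userResponse = await afterLogin(
    response.payload.userId,
    response.payload.accessJwt,
    response.payload.refreshJwt,
  );
  if (userResponse) {
    // the payload carries the role used for the route permission checks in Private.tsx
    console.info('logged in as', userResponse.payload.email, userResponse.payload.role);
  }
}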

View File

@@ -1,7 +0,0 @@
import { GatewayApiV2Instance as axios } from 'api';
import { AxiosResponse } from 'axios';
import { DeploymentsDataProps } from 'types/api/customDomain/types';
export const getDeploymentsData = (): Promise<
AxiosResponse<DeploymentsDataProps>
> => axios.get(`/deployments/me`);

View File

@@ -1,16 +0,0 @@
import { GatewayApiV2Instance as axios } from 'api';
import { AxiosError } from 'axios';
import { SuccessResponse } from 'types/api';
import {
PayloadProps,
UpdateCustomDomainProps,
} from 'types/api/customDomain/types';
const updateSubDomainAPI = async (
props: UpdateCustomDomainProps,
): Promise<SuccessResponse<PayloadProps> | AxiosError> =>
axios.put(`/deployments/me/host`, {
...props.data,
});
export default updateSubDomainAPI;

View File

@@ -7,6 +7,7 @@ import afterLogin from 'AppRoutes/utils';
import axios, { AxiosResponse, InternalAxiosRequestConfig } from 'axios';
import { ENVIRONMENT } from 'constants/env';
import { LOCALSTORAGE } from 'constants/localStorage';
import store from 'store';
import apiV1, {
apiAlertManager,
@@ -25,7 +26,10 @@ const interceptorsResponse = (
const interceptorsRequestResponse = (
value: InternalAxiosRequestConfig,
): InternalAxiosRequestConfig => {
const token = getLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN) || '';
const token =
store.getState().app.user?.accessJwt ||
getLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN) ||
'';
if (value && value.headers) {
value.headers.Authorization = token ? `Bearer ${token}` : '';
@@ -43,36 +47,41 @@ const interceptorRejected = async (
// reject the refresh token error
if (response.status === 401 && response.config.url !== '/login') {
const response = await loginApi({
refreshToken: getLocalStorageApi(LOCALSTORAGE.REFRESH_AUTH_TOKEN) || '',
refreshToken: store.getState().app.user?.refreshJwt,
});
if (response.statusCode === 200) {
afterLogin(
const user = await afterLogin(
response.payload.userId,
response.payload.accessJwt,
response.payload.refreshJwt,
true,
);
const reResponse = await axios(
`${value.config.baseURL}${value.config.url?.substring(1)}`,
{
method: value.config.method,
headers: {
...value.config.headers,
Authorization: `Bearer ${response.payload.accessJwt}`,
if (user) {
const reResponse = await axios(
`${value.config.baseURL}${value.config.url?.substring(1)}`,
{
method: value.config.method,
headers: {
...value.config.headers,
Authorization: `Bearer ${response.payload.accessJwt}`,
},
data: {
...JSON.parse(value.config.data || '{}'),
},
},
data: {
...JSON.parse(value.config.data || '{}'),
},
},
);
);
if (reResponse.status === 200) {
return await Promise.resolve(reResponse);
if (reResponse.status === 200) {
return await Promise.resolve(reResponse);
}
Logout();
return await Promise.reject(reResponse);
}
Logout();
return await Promise.reject(reResponse);
return await Promise.reject(value);
}
Logout();
}
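The retry-on-401 logic above follows the usual axios response-interceptor shape. A minimal standalone sketch of that pattern, where refreshAccessToken is a hypothetical helper standing in for the loginApi + afterLogin flow in the diff:

import axios, { AxiosError } from 'axios';

// Hypothetical helper: exchange the stored refresh token for a new access token.
async function refreshAccessToken(): Promise<string | null> {
  return null; // placeholder; the real flow goes through loginApi + afterLogin
}

const instance = axios.create({ baseURL: '/api/v1/' });

instance.interceptors.response.use(
  (response) => response,
  async (error: AxiosError) => {
    if (error.response?.status === 401 && error.config) {
      const token = await refreshAccessToken();
      if (token) {
        // replay the original request once with the refreshed credentials
        error.config.headers.set('Authorization', `Bearer ${token}`);
        return instance.request(error.config);
      }
    }
    return Promise.reject(error);
  },
);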

View File

@@ -2,7 +2,6 @@ import { ApiBaseInstance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError, AxiosResponse } from 'axios';
import { baseAutoCompleteIdKeysOrder } from 'constants/queryBuilder';
import { K8sCategory } from 'container/InfraMonitoringK8s/constants';
import { createIdFromObjectFields } from 'lib/createIdFromObjectFields';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
@@ -12,18 +11,12 @@ import {
export const getHostAttributeKeys = async (
searchText = '',
entity: K8sCategory,
): Promise<SuccessResponse<IQueryAutocompleteResponse> | ErrorResponse> => {
try {
const response: AxiosResponse<{
data: IQueryAutocompleteResponse;
}> = await ApiBaseInstance.get(
`/${entity}/attribute_keys?dataSource=metrics&searchText=${searchText}`,
{
params: {
limit: 500,
},
},
`/hosts/attribute_keys?dataSource=metrics&searchText=${searchText}`,
);
const payload: BaseAutocompleteData[] =

View File

@@ -14,7 +14,6 @@ export const getInfraAttributesValues = async ({
filterAttributeKeyDataType,
tagType,
searchText,
aggregateAttribute,
}: IGetAttributeValuesPayload): Promise<
SuccessResponse<IAttributeValuesResponse> | ErrorResponse
> => {
@@ -24,7 +23,6 @@ export const getInfraAttributesValues = async ({
dataSource,
attributeKey,
searchText,
aggregateAttribute,
})}&filterAttributeKeyDataType=${filterAttributeKeyDataType}&tagType=${tagType}`,
);

View File

@@ -1,65 +0,0 @@
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
export interface K8sNodesListPayload {
filters: TagFilter;
groupBy?: BaseAutocompleteData[];
offset?: number;
limit?: number;
orderBy?: {
columnName: string;
order: 'asc' | 'desc';
};
}
export interface K8sNodesData {
nodeUID: string;
nodeCPUUsage: number;
nodeCPUAllocatable: number;
nodeMemoryUsage: number;
nodeMemoryAllocatable: number;
meta: {
k8s_node_name: string;
k8s_node_uid: string;
k8s_cluster_name: string;
};
}
export interface K8sNodesListResponse {
status: string;
data: {
type: string;
records: K8sNodesData[];
groups: null;
total: number;
sentAnyHostMetricsData: boolean;
isSendingK8SAgentMetrics: boolean;
};
}
export const getK8sNodesList = async (
props: K8sNodesListPayload,
signal?: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponse<K8sNodesListResponse> | ErrorResponse> => {
try {
const response = await ApiBaseInstance.post('/nodes/list', props, {
signal,
headers,
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data,
params: props,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

View File

@@ -1,93 +0,0 @@
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
export interface K8sPodsListPayload {
filters: TagFilter;
groupBy?: BaseAutocompleteData[];
offset?: number;
limit?: number;
orderBy?: {
columnName: string;
order: 'asc' | 'desc';
};
}
export interface TimeSeriesValue {
timestamp: number;
value: string;
}
export interface TimeSeries {
labels: Record<string, string>;
labelsArray: Array<Record<string, string>>;
values: TimeSeriesValue[];
}
export interface K8sPodsData {
podUID: string;
podCPU: number;
podCPURequest: number;
podCPULimit: number;
podMemory: number;
podMemoryRequest: number;
podMemoryLimit: number;
restartCount: number;
meta: {
k8s_cronjob_name: string;
k8s_daemonset_name: string;
k8s_deployment_name: string;
k8s_job_name: string;
k8s_namespace_name: string;
k8s_node_name: string;
k8s_pod_name: string;
k8s_pod_uid: string;
k8s_statefulset_name: string;
k8s_cluster_name: string;
};
countByPhase: {
pending: number;
running: number;
succeeded: number;
failed: number;
unknown: number;
};
}
export interface K8sPodsListResponse {
status: string;
data: {
type: string;
records: K8sPodsData[];
groups: null;
total: number;
sentAnyHostMetricsData: boolean;
isSendingK8SAgentMetrics: boolean;
};
}
export const getK8sPodsList = async (
props: K8sPodsListPayload,
signal?: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponse<K8sPodsListResponse> | ErrorResponse> => {
try {
const response = await ApiBaseInstance.post('/pods/list', props, {
signal,
headers,
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data,
params: props,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

View File

@@ -1,18 +1,24 @@
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps } from 'types/api/licenses/getAll';
const getAll = async (): Promise<
SuccessResponse<PayloadProps> | ErrorResponse
> => {
const response = await axios.get('/licenses');
try {
const response = await axios.get('/licenses');
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default getAll;

View File

@@ -1,18 +1,28 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/user/getUser';
const getUser = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const response = await axios.get(`/user/${props.userId}`);
try {
const response = await axios.get(`/user/${props.userId}`, {
headers: {
Authorization: `bearer ${props.token}`,
},
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data,
};
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default getUser;
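Since the wrapper now catches errors and funnels them through ErrorResponseHandler, a caller never needs its own try/catch; a rough usage sketch (the explicit token argument matches the new bearer header above):

import getUserApi from 'api/user/getUser';

// Illustrative only: callers branch on statusCode instead of catching exceptions.
async function loadUser(userId: string, token: string): Promise<void> {
  const result = await getUserApi({ userId, token });
  if (result.statusCode === 200 && result.payload) {
    console.info('fetched user', result.payload.name);
  } else {
    console.warn('failed to fetch user', result.error);
  }
}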

View File

@@ -2,6 +2,14 @@ import deleteLocalStorageKey from 'api/browser/localstorage/remove';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
import history from 'lib/history';
import store from 'store';
import {
LOGGED_IN,
UPDATE_ORG,
UPDATE_USER,
UPDATE_USER_ACCESS_REFRESH_ACCESS_TOKEN,
UPDATE_USER_ORG_ROLE,
} from 'types/actions/app';
export const Logout = (): void => {
deleteLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN);
@@ -11,9 +19,50 @@ export const Logout = (): void => {
deleteLocalStorageKey(LOCALSTORAGE.LOGGED_IN_USER_EMAIL);
deleteLocalStorageKey(LOCALSTORAGE.LOGGED_IN_USER_NAME);
deleteLocalStorageKey(LOCALSTORAGE.CHAT_SUPPORT);
deleteLocalStorageKey(LOCALSTORAGE.USER_ID);
window.dispatchEvent(new CustomEvent('LOGOUT'));
store.dispatch({
type: LOGGED_IN,
payload: {
isLoggedIn: false,
},
});
store.dispatch({
type: UPDATE_USER_ORG_ROLE,
payload: {
org: null,
role: null,
},
});
store.dispatch({
type: UPDATE_USER,
payload: {
ROLE: 'VIEWER',
email: '',
name: '',
orgId: '',
orgName: '',
profilePictureURL: '',
userId: '',
userFlags: {},
},
});
store.dispatch({
type: UPDATE_USER_ACCESS_REFRESH_ACCESS_TOKEN,
payload: {
accessJwt: '',
refreshJwt: '',
},
});
store.dispatch({
type: UPDATE_ORG,
payload: {
org: [],
},
});
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore

View File

@@ -2,9 +2,9 @@ import { Button, Modal, Typography } from 'antd';
import updateCreditCardApi from 'api/billing/checkout';
import logEvent from 'api/common/logEvent';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import useLicense from 'hooks/useLicense';
import { useNotifications } from 'hooks/useNotifications';
import { CreditCard, X } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { useEffect, useState } from 'react';
import { useMutation } from 'react-query';
import { useLocation } from 'react-router-dom';
@@ -20,16 +20,16 @@ export default function ChatSupportGateway(): JSX.Element {
false,
);
const { licenses, isFetchingLicenses } = useAppContext();
const { data: licenseData, isFetching } = useLicense();
useEffect(() => {
if (!isFetchingLicenses && licenses) {
const activeValidLicense =
licenses.licenses?.find((license) => license.isCurrent === true) || null;
const activeValidLicense =
licenseData?.payload?.licenses?.find(
(license) => license.isCurrent === true,
) || null;
setActiveLicense(activeValidLicense);
}
}, [licenses, isFetchingLicenses]);
setActiveLicense(activeValidLicense);
}, [licenseData, isFetching]);
const handleBillingOnSuccess = (
data: ErrorResponse | SuccessResponse<CheckoutSuccessPayloadProps, unknown>,

View File

@@ -40,7 +40,7 @@
&.custom-time {
input:not(:focus) {
min-width: 280px;
min-width: 240px;
}
}
@@ -119,69 +119,3 @@
color: var(--bg-slate-400) !important;
}
}
.date-time-popover__footer {
border-top: 1px solid var(--bg-ink-200);
padding: 8px 14px;
.timezone-container {
&,
.timezone {
font-family: Inter;
font-size: 12px;
line-height: 16px;
letter-spacing: -0.06px;
}
display: flex;
align-items: center;
color: var(--bg-vanilla-400);
gap: 6px;
.timezone {
display: flex;
align-items: center;
gap: 4px;
border-radius: 2px;
background: rgba(171, 189, 255, 0.04);
cursor: pointer;
padding: 0px 4px;
color: var(--bg-vanilla-100);
border: none;
}
}
}
.timezone-badge {
display: flex;
align-items: center;
justify-content: center;
padding: 0 4px;
border-radius: 2px;
background: rgba(171, 189, 255, 0.04);
color: var(--bg-vanilla-100);
font-size: 12px;
font-weight: 400;
line-height: 16px;
letter-spacing: -0.06px;
cursor: pointer;
}
.lightMode {
.date-time-popover__footer {
border-color: var(--bg-vanilla-400);
}
.timezone-container {
color: var(--bg-ink-400);
&__clock-icon {
stroke: var(--bg-ink-400);
}
.timezone {
color: var(--bg-ink-100);
background: rgb(179 179 179 / 15%);
&__icon {
stroke: var(--bg-ink-100);
}
}
}
.timezone-badge {
color: var(--bg-ink-100);
background: rgb(179 179 179 / 15%);
}
}

View File

@@ -3,7 +3,6 @@
import './CustomTimePicker.styles.scss';
import { Input, Popover, Tooltip, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import cx from 'classnames';
import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal';
import {
@@ -16,14 +15,11 @@ import { isValidTimeFormat } from 'lib/getMinMax';
import { defaultTo, isFunction, noop } from 'lodash-es';
import debounce from 'lodash-es/debounce';
import { CheckCircle, ChevronDown, Clock } from 'lucide-react';
import { useTimezone } from 'providers/Timezone';
import {
ChangeEvent,
Dispatch,
SetStateAction,
useCallback,
useEffect,
useMemo,
useState,
} from 'react';
import { useLocation } from 'react-router-dom';
@@ -32,8 +28,6 @@ import { popupContainer } from 'utils/selectPopupContainer';
import CustomTimePickerPopoverContent from './CustomTimePickerPopoverContent';
const maxAllowedMinTimeInMonths = 6;
type ViewType = 'datetime' | 'timezone';
const DEFAULT_VIEW: ViewType = 'datetime';
interface CustomTimePickerProps {
onSelect: (value: string) => void;
@@ -87,42 +81,11 @@ function CustomTimePicker({
const location = useLocation();
const [isInputFocused, setIsInputFocused] = useState(false);
const [activeView, setActiveView] = useState<ViewType>(DEFAULT_VIEW);
const { timezone, browserTimezone } = useTimezone();
const activeTimezoneOffset = timezone.offset;
const isTimezoneOverridden = useMemo(
() => timezone.offset !== browserTimezone.offset,
[timezone, browserTimezone],
);
const handleViewChange = useCallback(
(newView: 'timezone' | 'datetime'): void => {
if (activeView !== newView) {
setActiveView(newView);
}
setOpen(true);
},
[activeView, setOpen],
);
const [isOpenedFromFooter, setIsOpenedFromFooter] = useState(false);
const getSelectedTimeRangeLabel = (
selectedTime: string,
selectedTimeValue: string,
): string => {
if (selectedTime === 'custom') {
// Convert the date range string to 12-hour format
const dates = selectedTimeValue.split(' - ');
if (dates.length === 2) {
const startDate = dayjs(dates[0], 'DD/MM/YYYY HH:mm');
const endDate = dayjs(dates[1], 'DD/MM/YYYY HH:mm');
return `${startDate.format('DD/MM/YYYY hh:mm A')} - ${endDate.format(
'DD/MM/YYYY hh:mm A',
)}`;
}
return selectedTimeValue;
}
@@ -168,7 +131,6 @@ function CustomTimePicker({
setOpen(newOpen);
if (!newOpen) {
setCustomDTPickerVisible?.(false);
setActiveView('datetime');
}
};
@@ -282,7 +244,6 @@ function CustomTimePicker({
const handleFocus = (): void => {
setIsInputFocused(true);
setActiveView('datetime');
};
const handleBlur = (): void => {
@@ -298,18 +259,6 @@ function CustomTimePicker({
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [location.pathname]);
const handleTimezoneHintClick = (e: React.MouseEvent): void => {
e.stopPropagation();
handleViewChange('timezone');
setIsOpenedFromFooter(false);
logEvent(
'DateTimePicker: Timezone picker opened from time range input badge',
{
page: location.pathname,
},
);
};
return (
<div className="custom-time-picker">
<Popover
@@ -331,10 +280,6 @@ function CustomTimePicker({
handleGoLive={defaultTo(handleGoLive, noop)}
options={items}
selectedTime={selectedTime}
activeView={activeView}
setActiveView={setActiveView}
setIsOpenedFromFooter={setIsOpenedFromFooter}
isOpenedFromFooter={isOpenedFromFooter}
/>
) : (
content
@@ -371,17 +316,12 @@ function CustomTimePicker({
)
}
suffix={
<>
{!!isTimezoneOverridden && activeTimezoneOffset && (
<div className="timezone-badge" onClick={handleTimezoneHintClick}>
<span>{activeTimezoneOffset}</span>
</div>
)}
<ChevronDown
size={14}
onClick={(): void => handleViewChange('datetime')}
/>
</>
<ChevronDown
size={14}
onClick={(): void => {
setOpen(!open);
}}
/>
}
/>
</Popover>

View File

@@ -1,8 +1,6 @@
import './CustomTimePicker.styles.scss';
import { Color } from '@signozhq/design-tokens';
import { Button } from 'antd';
import logEvent from 'api/common/logEvent';
import cx from 'classnames';
import ROUTES from 'constants/routes';
import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal';
@@ -11,13 +9,10 @@ import {
Option,
RelativeDurationSuggestionOptions,
} from 'container/TopNav/DateTimeSelectionV2/config';
import { Clock, PenLine } from 'lucide-react';
import { useTimezone } from 'providers/Timezone';
import { Dispatch, SetStateAction, useMemo } from 'react';
import { useLocation } from 'react-router-dom';
import RangePickerModal from './RangePickerModal';
import TimezonePicker from './TimezonePicker';
interface CustomTimePickerPopoverContentProps {
options: any[];
@@ -31,13 +26,8 @@ interface CustomTimePickerPopoverContentProps {
onSelectHandler: (label: string, value: string) => void;
handleGoLive: () => void;
selectedTime: string;
activeView: 'datetime' | 'timezone';
setActiveView: Dispatch<SetStateAction<'datetime' | 'timezone'>>;
isOpenedFromFooter: boolean;
setIsOpenedFromFooter: Dispatch<SetStateAction<boolean>>;
}
// eslint-disable-next-line sonarjs/cognitive-complexity
function CustomTimePickerPopoverContent({
options,
setIsOpen,
@@ -47,18 +37,12 @@ function CustomTimePickerPopoverContent({
onSelectHandler,
handleGoLive,
selectedTime,
activeView,
setActiveView,
isOpenedFromFooter,
setIsOpenedFromFooter,
}: CustomTimePickerPopoverContentProps): JSX.Element {
const { pathname } = useLocation();
const isLogsExplorerPage = useMemo(() => pathname === ROUTES.LOGS_EXPLORER, [
pathname,
]);
const { timezone } = useTimezone();
const activeTimezoneOffset = timezone.offset;
function getTimeChips(options: Option[]): JSX.Element {
return (
@@ -79,105 +63,55 @@ function CustomTimePickerPopoverContent({
);
}
const handleTimezoneHintClick = (): void => {
setActiveView('timezone');
setIsOpenedFromFooter(true);
logEvent(
'DateTimePicker: Timezone picker opened from time range picker footer',
{
page: pathname,
},
);
};
if (activeView === 'timezone') {
return (
<div className="date-time-popover">
<TimezonePicker
setActiveView={setActiveView}
setIsOpen={setIsOpen}
isOpenedFromFooter={isOpenedFromFooter}
/>
</div>
);
}
return (
<>
<div className="date-time-popover">
<div className="date-time-options">
{isLogsExplorerPage && (
<Button className="data-time-live" type="text" onClick={handleGoLive}>
Live
</Button>
)}
{options.map((option) => (
<Button
type="text"
key={option.label + option.value}
onClick={(): void => {
onSelectHandler(option.label, option.value);
}}
className={cx(
'date-time-options-btn',
customDateTimeVisible
? option.value === 'custom' && 'active'
: selectedTime === option.value && 'active',
)}
>
{option.label}
</Button>
))}
</div>
<div
className={cx(
'relative-date-time',
selectedTime === 'custom' || customDateTimeVisible
? 'date-picker'
: 'relative-times',
)}
>
{selectedTime === 'custom' || customDateTimeVisible ? (
<RangePickerModal
setCustomDTPickerVisible={setCustomDTPickerVisible}
setIsOpen={setIsOpen}
onCustomDateHandler={onCustomDateHandler}
selectedTime={selectedTime}
/>
) : (
<div className="relative-times-container">
<div className="time-heading">RELATIVE TIMES</div>
<div>{getTimeChips(RelativeDurationSuggestionOptions)}</div>
</div>
)}
</div>
</div>
<div className="date-time-popover__footer">
<div className="timezone-container">
<Clock
color={Color.BG_VANILLA_400}
className="timezone-container__clock-icon"
height={12}
width={12}
/>
<span className="timezone__icon">Current timezone</span>
<div></div>
<button
type="button"
className="timezone"
onClick={handleTimezoneHintClick}
<div className="date-time-popover">
<div className="date-time-options">
{isLogsExplorerPage && (
<Button className="data-time-live" type="text" onClick={handleGoLive}>
Live
</Button>
)}
{options.map((option) => (
<Button
type="text"
key={option.label + option.value}
onClick={(): void => {
onSelectHandler(option.label, option.value);
}}
className={cx(
'date-time-options-btn',
customDateTimeVisible
? option.value === 'custom' && 'active'
: selectedTime === option.value && 'active',
)}
>
<span>{activeTimezoneOffset}</span>
<PenLine
color={Color.BG_VANILLA_100}
className="timezone__icon"
size={10}
/>
</button>
</div>
{option.label}
</Button>
))}
</div>
</>
<div
className={cx(
'relative-date-time',
selectedTime === 'custom' || customDateTimeVisible
? 'date-picker'
: 'relative-times',
)}
>
{selectedTime === 'custom' || customDateTimeVisible ? (
<RangePickerModal
setCustomDTPickerVisible={setCustomDTPickerVisible}
setIsOpen={setIsOpen}
onCustomDateHandler={onCustomDateHandler}
selectedTime={selectedTime}
/>
) : (
<div className="relative-times-container">
<div className="time-heading">RELATIVE TIMES</div>
<div>{getTimeChips(RelativeDurationSuggestionOptions)}</div>
</div>
)}
</div>
</div>
);
}

View File

@@ -4,8 +4,7 @@ import { DatePicker } from 'antd';
import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal';
import { LexicalContext } from 'container/TopNav/DateTimeSelectionV2/config';
import dayjs, { Dayjs } from 'dayjs';
import { useTimezone } from 'providers/Timezone';
import { Dispatch, SetStateAction, useMemo } from 'react';
import { Dispatch, SetStateAction } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { GlobalReducer } from 'types/reducer/globalTime';
@@ -32,10 +31,7 @@ function RangePickerModal(props: RangePickerModalProps): JSX.Element {
(state) => state.globalTime,
);
// Using any type here because antd's DatePicker expects its own internal Dayjs type
// which conflicts with our project's Dayjs type that has additional plugins (tz, utc etc).
// eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/explicit-module-boundary-types
const disabledDate = (current: any): boolean => {
const disabledDate = (current: Dayjs): boolean => {
const currentDay = dayjs(current);
return currentDay.isAfter(dayjs());
};
@@ -53,32 +49,16 @@ function RangePickerModal(props: RangePickerModalProps): JSX.Element {
}
onCustomDateHandler(date_time, LexicalContext.CUSTOM_DATE_PICKER);
};
const { timezone } = useTimezone();
const rangeValue: [Dayjs, Dayjs] = useMemo(
() => [
dayjs(minTime / 1000_000).tz(timezone.value),
dayjs(maxTime / 1000_000).tz(timezone.value),
],
[maxTime, minTime, timezone.value],
);
return (
<div className="custom-date-picker">
<RangePicker
disabledDate={disabledDate}
allowClear
showTime={{
use12Hours: true,
format: 'hh:mm A',
}}
format={(date: Dayjs): string =>
date.tz(timezone.value).format('YYYY-MM-DD hh:mm A')
}
showTime
onOk={onModalOkHandler}
// eslint-disable-next-line react/jsx-props-no-spreading
{...(selectedTime === 'custom' && {
value: rangeValue,
defaultValue: [dayjs(minTime / 1000000), dayjs(maxTime / 1000000)],
})}
/>
</div>

View File

@@ -1,166 +0,0 @@
// Variables
$font-family: 'Inter';
$item-spacing: 8px;
:root {
--border-color: var(--bg-slate-400);
}
.lightMode {
--border-color: var(--bg-vanilla-400);
}
// Mixins
@mixin text-style-base {
font-family: $font-family;
font-style: normal;
font-weight: 400;
}
@mixin flex-center {
display: flex;
align-items: center;
}
.timezone-picker {
width: 532px;
color: var(--bg-vanilla-400);
font-family: $font-family;
&__search {
@include flex-center;
justify-content: space-between;
padding: 12px 14px;
border-bottom: 1px solid var(--border-color);
}
&__input-container {
@include flex-center;
gap: 6px;
width: -webkit-fill-available;
}
&__input {
@include text-style-base;
width: 100%;
background: transparent;
border: none;
outline: none;
color: var(--bg-vanilla-100);
font-size: 14px;
line-height: 20px;
letter-spacing: -0.07px;
padding: 0;
&.ant-input:focus {
box-shadow: none;
}
&::placeholder {
color: var(--bg-vanilla-400);
}
}
&__esc-key {
@include text-style-base;
font-size: 8px;
color: var(--bg-vanilla-400);
letter-spacing: -0.04px;
border-radius: 2.286px;
border: 1.143px solid var(--bg-ink-200);
border-bottom-width: 2.286px;
background: var(--bg-ink-400);
padding: 0 1px;
}
&__list {
max-height: 310px;
overflow-y: auto;
}
&__item {
@include flex-center;
justify-content: space-between;
padding: 7.5px 6px 7.5px $item-spacing;
margin: 4px $item-spacing;
cursor: pointer;
background: transparent;
border: none;
width: -webkit-fill-available;
color: var(--bg-vanilla-400);
font-family: $font-family;
&:hover,
&.selected {
border-radius: 2px;
background: rgba(171, 189, 255, 0.04);
color: var(--bg-vanilla-100);
}
&.has-divider {
position: relative;
&::after {
content: '';
position: absolute;
bottom: -2px;
left: -$item-spacing;
right: -$item-spacing;
border-bottom: 1px solid var(--border-color);
}
}
}
&__name {
@include text-style-base;
font-size: 14px;
line-height: 20px;
letter-spacing: -0.07px;
}
&__offset {
color: var(--bg-vanilla-100);
font-size: 12px;
line-height: 16px;
letter-spacing: -0.06px;
}
}
.timezone-name-wrapper {
@include flex-center;
gap: 6px;
&__selected-icon {
height: 15px;
width: 15px;
}
}
.lightMode {
.timezone-picker {
&__search {
.search-icon {
stroke: var(--bg-ink-400);
}
}
&__input {
color: var(--bg-ink-100);
}
&__esc-key {
background-color: var(--bg-vanilla-100);
border-color: var(--bg-vanilla-400);
color: var(--bg-ink-400);
}
&__item {
color: var(--bg-ink-400);
}
&__offset {
color: var(--bg-ink-100);
}
}
.timezone-name-wrapper {
&__selected-icon {
.check-icon {
stroke: var(--bg-ink-100);
}
}
}
}

View File

@@ -1,207 +0,0 @@
import './TimezonePicker.styles.scss';
import { Color } from '@signozhq/design-tokens';
import { Input } from 'antd';
import logEvent from 'api/common/logEvent';
import cx from 'classnames';
import { TimezonePickerShortcuts } from 'constants/shortcuts/TimezonePickerShortcuts';
import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';
import { Check, Search } from 'lucide-react';
import { useTimezone } from 'providers/Timezone';
import {
Dispatch,
SetStateAction,
useCallback,
useEffect,
useState,
} from 'react';
import { Timezone, TIMEZONE_DATA } from './timezoneUtils';
interface SearchBarProps {
value: string;
onChange: (value: string) => void;
setIsOpen: Dispatch<SetStateAction<boolean>>;
setActiveView: Dispatch<SetStateAction<'datetime' | 'timezone'>>;
isOpenedFromFooter: boolean;
}
interface TimezoneItemProps {
timezone: Timezone;
isSelected?: boolean;
onClick?: () => void;
}
const ICON_SIZE = 14;
function SearchBar({
value,
onChange,
setIsOpen,
setActiveView,
isOpenedFromFooter = false,
}: SearchBarProps): JSX.Element {
const handleKeyDown = useCallback(
(e: React.KeyboardEvent): void => {
if (e.key === 'Escape') {
if (isOpenedFromFooter) {
setActiveView('datetime');
} else {
setIsOpen(false);
}
}
},
[setActiveView, setIsOpen, isOpenedFromFooter],
);
return (
<div className="timezone-picker__search">
<div className="timezone-picker__input-container">
<Search
color={Color.BG_VANILLA_400}
className="search-icon"
height={ICON_SIZE}
width={ICON_SIZE}
/>
<Input
type="text"
className="timezone-picker__input"
placeholder="Search timezones..."
value={value}
onChange={(e): void => onChange(e.target.value)}
onKeyDown={handleKeyDown}
tabIndex={0}
autoFocus
/>
</div>
<kbd className="timezone-picker__esc-key">esc</kbd>
</div>
);
}
function TimezoneItem({
timezone,
isSelected = false,
onClick,
}: TimezoneItemProps): JSX.Element {
return (
<button
type="button"
className={cx('timezone-picker__item', {
selected: isSelected,
'has-divider': timezone.hasDivider,
})}
onClick={onClick}
>
<div className="timezone-name-wrapper">
<div className="timezone-name-wrapper__selected-icon">
{isSelected && (
<Check
className="check-icon"
color={Color.BG_VANILLA_100}
height={ICON_SIZE}
width={ICON_SIZE}
/>
)}
</div>
<div className="timezone-picker__name">{timezone.name}</div>
</div>
<div className="timezone-picker__offset">{timezone.offset}</div>
</button>
);
}
TimezoneItem.defaultProps = {
isSelected: false,
onClick: undefined,
};
interface TimezonePickerProps {
setActiveView: Dispatch<SetStateAction<'datetime' | 'timezone'>>;
setIsOpen: Dispatch<SetStateAction<boolean>>;
isOpenedFromFooter: boolean;
}
function TimezonePicker({
setActiveView,
setIsOpen,
isOpenedFromFooter,
}: TimezonePickerProps): JSX.Element {
const [searchTerm, setSearchTerm] = useState('');
const { timezone, updateTimezone } = useTimezone();
const [selectedTimezone, setSelectedTimezone] = useState<string>(
timezone.name ?? TIMEZONE_DATA[0].name,
);
const getFilteredTimezones = useCallback((searchTerm: string): Timezone[] => {
const normalizedSearch = searchTerm.toLowerCase();
return TIMEZONE_DATA.filter(
(tz) =>
tz.name.toLowerCase().includes(normalizedSearch) ||
tz.offset.toLowerCase().includes(normalizedSearch) ||
tz.searchIndex.toLowerCase().includes(normalizedSearch),
);
}, []);
const handleCloseTimezonePicker = useCallback(() => {
if (isOpenedFromFooter) {
setActiveView('datetime');
} else {
setIsOpen(false);
}
}, [isOpenedFromFooter, setActiveView, setIsOpen]);
const handleTimezoneSelect = useCallback(
(timezone: Timezone) => {
setSelectedTimezone(timezone.name);
updateTimezone(timezone);
handleCloseTimezonePicker();
setIsOpen(false);
logEvent('DateTimePicker: New Timezone Selected', {
timezone: {
name: timezone.name,
offset: timezone.offset,
},
});
},
[handleCloseTimezonePicker, setIsOpen, updateTimezone],
);
// Register keyboard shortcuts
const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();
useEffect(() => {
registerShortcut(
TimezonePickerShortcuts.CloseTimezonePicker,
handleCloseTimezonePicker,
);
return (): void => {
deregisterShortcut(TimezonePickerShortcuts.CloseTimezonePicker);
};
}, [deregisterShortcut, handleCloseTimezonePicker, registerShortcut]);
return (
<div className="timezone-picker">
<SearchBar
value={searchTerm}
onChange={setSearchTerm}
setIsOpen={setIsOpen}
setActiveView={setActiveView}
isOpenedFromFooter={isOpenedFromFooter}
/>
<div className="timezone-picker__list">
{getFilteredTimezones(searchTerm).map((timezone) => (
<TimezoneItem
key={timezone.value}
timezone={timezone}
isSelected={timezone.name === selectedTimezone}
onClick={(): void => handleTimezoneSelect(timezone)}
/>
))}
</div>
</div>
);
}
export default TimezonePicker;

View File

@@ -1,152 +0,0 @@
import dayjs from 'dayjs';
import timezone from 'dayjs/plugin/timezone';
import utc from 'dayjs/plugin/utc';
dayjs.extend(utc);
dayjs.extend(timezone);
export interface Timezone {
name: string;
value: string;
offset: string;
searchIndex: string;
hasDivider?: boolean;
}
const TIMEZONE_TYPES = {
BROWSER: 'BROWSER',
UTC: 'UTC',
STANDARD: 'STANDARD',
} as const;
type TimezoneType = typeof TIMEZONE_TYPES[keyof typeof TIMEZONE_TYPES];
export const UTC_TIMEZONE: Timezone = {
name: 'Coordinated Universal Time — UTC, GMT',
value: 'UTC',
offset: 'UTC',
searchIndex: 'UTC',
hasDivider: true,
};
const normalizeTimezoneName = (timezone: string): string => {
// https://github.com/tc39/proposal-temporal/issues/1076
if (timezone === 'Asia/Calcutta') {
return 'Asia/Kolkata';
}
return timezone;
};
const formatOffset = (offsetMinutes: number): string => {
if (offsetMinutes === 0) return 'UTC';
const hours = Math.floor(Math.abs(offsetMinutes) / 60);
const minutes = Math.abs(offsetMinutes) % 60;
const sign = offsetMinutes > 0 ? '+' : '-';
return `UTC ${sign} ${hours}${
minutes ? `:${minutes.toString().padStart(2, '0')}` : ':00'
}`;
};
const createTimezoneEntry = (
name: string,
offsetMinutes: number,
type: TimezoneType = TIMEZONE_TYPES.STANDARD,
hasDivider = false,
): Timezone => {
const offset = formatOffset(offsetMinutes);
let value = name;
let displayName = name;
switch (type) {
case TIMEZONE_TYPES.BROWSER:
displayName = `Browser time — ${name}`;
value = name;
break;
case TIMEZONE_TYPES.UTC:
displayName = 'Coordinated Universal Time — UTC, GMT';
value = 'UTC';
break;
case TIMEZONE_TYPES.STANDARD:
displayName = name;
value = name;
break;
default:
console.error(`Invalid timezone type: ${type}`);
}
return {
name: displayName,
value,
offset,
searchIndex: offset.replace(/ /g, ''),
...(hasDivider && { hasDivider }),
};
};
const getOffsetByTimezone = (timezone: string): number => {
const dayjsTimezone = dayjs().tz(timezone);
return dayjsTimezone.utcOffset();
};
export const getBrowserTimezone = (): Timezone => {
const browserTz = dayjs.tz.guess();
const normalizedTz = normalizeTimezoneName(browserTz);
const browserOffset = getOffsetByTimezone(normalizedTz);
return createTimezoneEntry(
normalizedTz,
browserOffset,
TIMEZONE_TYPES.BROWSER,
);
};
const filterAndSortTimezones = (
allTimezones: string[],
browserTzName?: string,
includeEtcTimezones = false,
): Timezone[] =>
allTimezones
.filter((tz) => {
const isNotBrowserTz = tz !== browserTzName;
const isNotEtcTz = includeEtcTimezones || !tz.startsWith('Etc/');
return isNotBrowserTz && isNotEtcTz;
})
.sort((a, b) => a.localeCompare(b))
.map((tz) => {
const normalizedTz = normalizeTimezoneName(tz);
const offset = getOffsetByTimezone(normalizedTz);
return createTimezoneEntry(normalizedTz, offset);
});
const generateTimezoneData = (includeEtcTimezones = false): Timezone[] => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const allTimezones = (Intl as any).supportedValuesOf('timeZone');
const timezones: Timezone[] = [];
// Add browser timezone
const browserTzObject = getBrowserTimezone();
timezones.push(browserTzObject);
// Add UTC timezone with divider
timezones.push(UTC_TIMEZONE);
timezones.push(
...filterAndSortTimezones(
allTimezones,
browserTzObject.value,
includeEtcTimezones,
),
);
return timezones;
};
export const getTimezoneObjectByTimezoneString = (
timezone: string,
): Timezone => {
const utcOffset = getOffsetByTimezone(timezone);
return createTimezoneEntry(timezone, utcOffset);
};
export const TIMEZONE_DATA = generateTimezoneData();
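A minimal, self-contained sketch of the offset-label behaviour produced by formatOffset above, with a few worked examples. offsetLabel is an illustrative copy for demonstration only, not an export of this module.
const offsetLabel = (offsetMinutes: number): string => {
  if (offsetMinutes === 0) return 'UTC';
  const hours = Math.floor(Math.abs(offsetMinutes) / 60);
  const minutes = Math.abs(offsetMinutes) % 60;
  const sign = offsetMinutes > 0 ? '+' : '-';
  return `UTC ${sign} ${hours}${
    minutes ? `:${minutes.toString().padStart(2, '0')}` : ':00'
  }`;
};
console.log(offsetLabel(0)); // 'UTC'
console.log(offsetLabel(330)); // 'UTC + 5:30' (e.g. Asia/Kolkata)
console.log(offsetLabel(-420)); // 'UTC - 7:00'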

View File

@@ -1,22 +1,15 @@
import { render } from '@testing-library/react';
import { Table } from 'antd';
import { matchMedia } from 'container/PipelinePage/tests/AddNewPipeline.test';
import { I18nextProvider } from 'react-i18next';
import { Provider } from 'react-redux';
import i18n from 'ReactI18';
import store from 'store';
import DraggableTableRow from '..';
beforeAll(() => {
Object.defineProperty(window, 'matchMedia', {
writable: true,
value: jest.fn().mockImplementation((query) => ({
matches: false,
media: query,
onchange: null,
addListener: jest.fn(),
removeListener: jest.fn(),
addEventListener: jest.fn(),
removeEventListener: jest.fn(),
dispatchEvent: jest.fn(),
})),
});
matchMedia();
});
jest.mock('uplot', () => {
@@ -41,14 +34,18 @@ jest.mock('react-dnd', () => ({
describe('DraggableTableRow Snapshot test', () => {
it('should render DraggableTableRow', async () => {
const { asFragment } = render(
<Table
components={{
body: {
row: DraggableTableRow,
},
}}
pagination={false}
/>,
<Provider store={store}>
<I18nextProvider i18n={i18n}>
<Table
components={{
body: {
row: DraggableTableRow,
},
}}
pagination={false}
/>
</I18nextProvider>
</Provider>,
);
expect(asFragment()).toMatchSnapshot();
});

View File

@@ -99,3 +99,5 @@ exports[`DraggableTableRow Snapshot test should render DraggableTableRow 1`] = `
</div>
</DocumentFragment>
`;
exports[`PipelinePage container test should render AddNewPipeline section 1`] = `<DocumentFragment />`;

View File

@@ -1,5 +1,4 @@
import {
_adapters,
BarController,
BarElement,
CategoryScale,
@@ -19,10 +18,8 @@ import {
} from 'chart.js';
import annotationPlugin from 'chartjs-plugin-annotation';
import { generateGridTitle } from 'container/GridPanelSwitch/utils';
import dayjs from 'dayjs';
import { useIsDarkMode } from 'hooks/useDarkMode';
import isEqual from 'lodash-es/isEqual';
import { useTimezone } from 'providers/Timezone';
import {
forwardRef,
memo,
@@ -65,17 +62,6 @@ Chart.register(
Tooltip.positioners.custom = TooltipPositionHandler;
// Map of Chart.js time formats to dayjs format strings
const formatMap = {
'HH:mm:ss': 'HH:mm:ss',
'HH:mm': 'HH:mm',
'MM/DD HH:mm': 'MM/DD HH:mm',
'MM/dd HH:mm': 'MM/DD HH:mm',
'MM/DD': 'MM/DD',
'YY-MM': 'YY-MM',
YY: 'YY',
};
const Graph = forwardRef<ToggleGraphProps | undefined, GraphProps>(
(
{
@@ -94,13 +80,11 @@ const Graph = forwardRef<ToggleGraphProps | undefined, GraphProps>(
dragSelectColor,
},
ref,
// eslint-disable-next-line sonarjs/cognitive-complexity
): JSX.Element => {
const nearestDatasetIndex = useRef<null | number>(null);
const chartRef = useRef<HTMLCanvasElement>(null);
const isDarkMode = useIsDarkMode();
const gridTitle = useMemo(() => generateGridTitle(title), [title]);
const { timezone } = useTimezone();
const currentTheme = isDarkMode ? 'dark' : 'light';
const xAxisTimeUnit = useXAxisTimeUnit(data); // Computes the relevant time unit for x axis by analyzing the time stamp data
@@ -128,22 +112,6 @@ const Graph = forwardRef<ToggleGraphProps | undefined, GraphProps>(
return 'rgba(231,233,237,0.8)';
}, [currentTheme]);
// Override Chart.js date adapter to use dayjs with timezone support
useEffect(() => {
_adapters._date.override({
format(time: number | Date, fmt: string) {
const dayjsTime = dayjs(time).tz(timezone.value);
const format = formatMap[fmt as keyof typeof formatMap];
if (!format) {
console.warn(`Missing datetime format for ${fmt}`);
return dayjsTime.format('YYYY-MM-DD HH:mm:ss'); // fallback format
}
return dayjsTime.format(format);
},
});
}, [timezone]);
const buildChart = useCallback(() => {
if (lineChartRef.current !== undefined) {
lineChartRef.current.destroy();
@@ -164,7 +132,6 @@ const Graph = forwardRef<ToggleGraphProps | undefined, GraphProps>(
isStacked,
onClickHandler,
data,
timezone,
);
const chartHasData = hasData(data);
@@ -199,7 +166,6 @@ const Graph = forwardRef<ToggleGraphProps | undefined, GraphProps>(
isStacked,
onClickHandler,
data,
timezone,
name,
type,
]);

View File

@@ -1,6 +1,5 @@
import { Chart, ChartConfiguration, ChartData, Color } from 'chart.js';
import * as chartjsAdapter from 'chartjs-adapter-date-fns';
import { Timezone } from 'components/CustomTimePicker/timezoneUtils';
import dayjs from 'dayjs';
import { MutableRefObject } from 'react';
@@ -51,7 +50,6 @@ export const getGraphOptions = (
isStacked: boolean | undefined,
onClickHandler: GraphOnClickHandler | undefined,
data: ChartData,
timezone: Timezone,
// eslint-disable-next-line sonarjs/cognitive-complexity
): CustomChartOptions => ({
animation: {
@@ -99,7 +97,7 @@ export const getGraphOptions = (
callbacks: {
title(context): string | string[] {
const date = dayjs(context[0].parsed.x);
return date.tz(timezone.value).format('MMM DD, YYYY, HH:mm:ss');
return date.format('MMM DD, YYYY, HH:mm:ss');
},
label(context): string | string[] {
let label = context.dataset.label || '';

View File

@@ -5,16 +5,18 @@ import { Button, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { useNotifications } from 'hooks/useNotifications';
import { CheckCircle2, HandPlatter } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import AppReducer from 'types/reducer/app';
export default function WaitlistFragment({
entityType,
}: {
entityType: string;
}): JSX.Element {
const { user } = useAppContext();
const { user } = useSelector<AppState, AppReducer>((state) => state.app);
const { t } = useTranslation(['infraMonitoring']);
const { notifications } = useNotifications();

View File

@@ -4,7 +4,6 @@ export enum VIEWS {
TRACES = 'traces',
CONTAINERS = 'containers',
PROCESSES = 'processes',
EVENTS = 'events',
}
export const VIEW_TYPES = {
@@ -13,5 +12,4 @@ export const VIEW_TYPES = {
TRACES: VIEWS.TRACES,
CONTAINERS: VIEWS.CONTAINERS,
PROCESSES: VIEWS.PROCESSES,
EVENTS: VIEWS.EVENTS,
};

View File

@@ -6,11 +6,12 @@ import logEvent from 'api/common/logEvent';
import cx from 'classnames';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { FeatureKeys } from 'constants/features';
import useFeatureFlags from 'hooks/useFeatureFlag';
import useLicense from 'hooks/useLicense';
import { useNotifications } from 'hooks/useNotifications';
import { defaultTo } from 'lodash-es';
import { CreditCard, HelpCircle, X } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { useEffect, useMemo, useState } from 'react';
import { useEffect, useState } from 'react';
import { useMutation } from 'react-query';
import { useLocation } from 'react-router-dom';
import { ErrorResponse, SuccessResponse } from 'types/api';
@@ -38,79 +39,31 @@ function LaunchChatSupport({
onHoverText = '',
intercomMessageDisabled = false,
}: LaunchChatSupportProps): JSX.Element | null {
const isChatSupportEnabled = useFeatureFlags(FeatureKeys.CHAT_SUPPORT)?.active;
const isCloudUserVal = isCloudUser();
const { notifications } = useNotifications();
const {
licenses,
isFetchingLicenses,
featureFlags,
isFetchingFeatureFlags,
featureFlagsFetchError,
isLoggedIn,
} = useAppContext();
const { data: licenseData, isFetching } = useLicense();
const [activeLicense, setActiveLicense] = useState<License | null>(null);
const [isAddCreditCardModalOpen, setIsAddCreditCardModalOpen] = useState(
false,
);
const { pathname } = useLocation();
const isPremiumChatSupportEnabled =
useFeatureFlags(FeatureKeys.PREMIUM_SUPPORT)?.active || false;
const isChatSupportEnabled = useMemo(() => {
if (!isFetchingFeatureFlags && (featureFlags || featureFlagsFetchError)) {
let isChatSupportEnabled = false;
if (featureFlags && featureFlags.length > 0) {
isChatSupportEnabled =
featureFlags.find((flag) => flag.name === FeatureKeys.CHAT_SUPPORT)
?.active || false;
}
return isChatSupportEnabled;
}
return false;
}, [featureFlags, featureFlagsFetchError, isFetchingFeatureFlags]);
const showAddCreditCardModal = useMemo(() => {
if (
!isFetchingFeatureFlags &&
(featureFlags || featureFlagsFetchError) &&
licenses
) {
let isChatSupportEnabled = false;
let isPremiumSupportEnabled = false;
const isCloudUserVal = isCloudUser();
if (featureFlags && featureFlags.length > 0) {
isChatSupportEnabled =
featureFlags.find((flag) => flag.name === FeatureKeys.CHAT_SUPPORT)
?.active || false;
isPremiumSupportEnabled =
featureFlags.find((flag) => flag.name === FeatureKeys.PREMIUM_SUPPORT)
?.active || false;
}
return (
isLoggedIn &&
!isPremiumSupportEnabled &&
isChatSupportEnabled &&
!licenses.trialConvertedToSubscription &&
isCloudUserVal
);
}
return false;
}, [
featureFlags,
featureFlagsFetchError,
isFetchingFeatureFlags,
isLoggedIn,
licenses,
]);
const showAddCreditCardModal =
!isPremiumChatSupportEnabled &&
!licenseData?.payload?.trialConvertedToSubscription;
useEffect(() => {
if (!isFetchingLicenses && licenses) {
const activeValidLicense =
licenses.licenses?.find((license) => license.isCurrent === true) || null;
setActiveLicense(activeValidLicense);
}
}, [isFetchingLicenses, licenses]);
const activeValidLicense =
licenseData?.payload?.licenses?.find(
(license) => license.isCurrent === true,
) || null;
setActiveLicense(activeValidLicense);
}, [licenseData, isFetching]);
const handleFacingIssuesClick = (): void => {
if (showAddCreditCardModal) {

View File

@@ -8,13 +8,13 @@ import LogDetail from 'components/LogDetail';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import { unescapeString } from 'container/LogDetailedView/utils';
import { FontSize } from 'container/OptionsMenu/types';
import dayjs from 'dayjs';
import dompurify from 'dompurify';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
import { useIsDarkMode } from 'hooks/useDarkMode';
// utils
import { FlatLogData } from 'lib/logs/flatLogData';
import { useTimezone } from 'providers/Timezone';
import { useCallback, useMemo, useState } from 'react';
// interfaces
import { IField } from 'types/api/logs/fields';
@@ -174,20 +174,12 @@ function ListLogView({
[selectedFields],
);
const { formatTimezoneAdjustedTimestamp } = useTimezone();
const timestampValue = useMemo(
() =>
typeof flattenLogData.timestamp === 'string'
? formatTimezoneAdjustedTimestamp(
flattenLogData.timestamp,
'YYYY-MM-DD HH:mm:ss.SSS',
)
: formatTimezoneAdjustedTimestamp(
flattenLogData.timestamp / 1e6,
'YYYY-MM-DD HH:mm:ss.SSS',
),
[flattenLogData.timestamp, formatTimezoneAdjustedTimestamp],
? dayjs(flattenLogData.timestamp).format('YYYY-MM-DD HH:mm:ss.SSS')
: dayjs(flattenLogData.timestamp / 1e6).format('YYYY-MM-DD HH:mm:ss.SSS'),
[flattenLogData.timestamp],
);
const logType = getLogIndicatorType(logData);

View File

@@ -6,6 +6,7 @@ import LogDetail from 'components/LogDetail';
import { VIEW_TYPES, VIEWS } from 'components/LogDetail/constants';
import { unescapeString } from 'container/LogDetailedView/utils';
import LogsExplorerContext from 'container/LogsExplorerContext';
import dayjs from 'dayjs';
import dompurify from 'dompurify';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
@@ -13,7 +14,6 @@ import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { FlatLogData } from 'lib/logs/flatLogData';
import { isEmpty, isNumber, isUndefined } from 'lodash-es';
import { useTimezone } from 'providers/Timezone';
import {
KeyboardEvent,
MouseEvent,
@@ -89,24 +89,16 @@ function RawLogView({
attributesText += ' | ';
}
const { formatTimezoneAdjustedTimestamp } = useTimezone();
const text = useMemo(() => {
const date =
typeof data.timestamp === 'string'
? formatTimezoneAdjustedTimestamp(data.timestamp, 'YYYY-MM-DD HH:mm:ss.SSS')
: formatTimezoneAdjustedTimestamp(
data.timestamp / 1e6,
'YYYY-MM-DD HH:mm:ss.SSS',
);
? dayjs(data.timestamp)
: dayjs(data.timestamp / 1e6);
return `${date} | ${attributesText} ${data.body}`;
}, [
data.timestamp,
data.body,
attributesText,
formatTimezoneAdjustedTimestamp,
]);
return `${date.format('YYYY-MM-DD HH:mm:ss.SSS')} | ${attributesText} ${
data.body
}`;
}, [data.timestamp, data.body, attributesText]);
const handleClickExpand = useCallback(() => {
if (activeContextLog || isReadOnly) return;

View File

@@ -22,13 +22,6 @@
}
}
.state-indicator {
width: 15px;
.log-state-indicator {
padding: 0px;
}
}
.table-timestamp {
display: flex;
align-items: center;
@@ -36,6 +29,10 @@
.ant-typography {
margin-bottom: 0;
}
.log-state-indicator {
padding: 0px;
}
}
.lightMode {

View File

@@ -5,10 +5,10 @@ import { Typography } from 'antd';
import { ColumnsType } from 'antd/es/table';
import cx from 'classnames';
import { unescapeString } from 'container/LogDetailedView/utils';
import dayjs from 'dayjs';
import dompurify from 'dompurify';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { FlatLogData } from 'lib/logs/flatLogData';
import { useTimezone } from 'providers/Timezone';
import { useMemo } from 'react';
import { FORBID_DOM_PURIFY_TAGS } from 'utils/app';
@@ -44,8 +44,6 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
logs,
]);
const { formatTimezoneAdjustedTimestamp } = useTimezone();
const columns: ColumnsType<Record<string, unknown>> = useMemo(() => {
const fieldColumns: ColumnsType<Record<string, unknown>> = fields
.filter((e) => e.name !== 'id')
@@ -75,38 +73,23 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
}
return [
{
// We do not need any title and data index for the log state indicator
title: '',
dataIndex: '',
key: 'state-indicator',
render: (_, item): ColumnTypeRender<Record<string, unknown>> => ({
children: (
<div className={cx('state-indicator', fontSize)}>
<LogStateIndicator
type={getLogIndicatorTypeForTable(item)}
fontSize={fontSize}
/>
</div>
),
}),
},
{
title: 'timestamp',
dataIndex: 'timestamp',
key: 'timestamp',
// https://github.com/ant-design/ant-design/discussions/36886
render: (field): ColumnTypeRender<Record<string, unknown>> => {
render: (field, item): ColumnTypeRender<Record<string, unknown>> => {
const date =
typeof field === 'string'
? formatTimezoneAdjustedTimestamp(field, 'YYYY-MM-DD HH:mm:ss.SSS')
: formatTimezoneAdjustedTimestamp(
field / 1e6,
'YYYY-MM-DD HH:mm:ss.SSS',
);
? dayjs(field).format('YYYY-MM-DD HH:mm:ss.SSS')
: dayjs(field / 1e6).format('YYYY-MM-DD HH:mm:ss.SSS');
return {
children: (
<div className="table-timestamp">
<LogStateIndicator
type={getLogIndicatorTypeForTable(item)}
fontSize={fontSize}
/>
<Typography.Paragraph ellipsis className={cx('text', fontSize)}>
{date}
</Typography.Paragraph>
@@ -142,15 +125,7 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
},
...(appendTo === 'end' ? fieldColumns : []),
];
}, [
fields,
isListViewPanel,
appendTo,
isDarkMode,
linesPerRow,
fontSize,
formatTimezoneAdjustedTimestamp,
]);
}, [fields, isListViewPanel, appendTo, isDarkMode, linesPerRow, fontSize]);
return { columns, dataSource: flattenLogData };
};

View File

@@ -152,6 +152,8 @@ export default function LogsFormatOptionsMenu({
const itemLength = optionsData.length;
if (!optionsData?.length || currentIndex < 0) return;
switch (e.key) {
case 'ArrowUp': {
const newValue = optionsData[Math.max(0, currentIndex - 1)]?.value;
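A minimal sketch of the clamped arrow-key navigation this hunk introduces. nextIndex and the ArrowDown clamp are assumptions built from the optionsData, currentIndex and itemLength names shown above, not code from the actual component.
function nextIndex(
  key: string,
  currentIndex: number,
  itemLength: number,
): number {
  // ArrowUp never moves above the first option; ArrowDown never past the last
  if (key === 'ArrowUp') return Math.max(0, currentIndex - 1);
  if (key === 'ArrowDown') return Math.min(itemLength - 1, currentIndex + 1);
  return currentIndex;
}
console.log(nextIndex('ArrowUp', 0, 5)); // 0 (clamped at the top)
console.log(nextIndex('ArrowDown', 4, 5)); // 4 (clamped at the bottom)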

View File

@@ -1,10 +1,31 @@
import getLocalStorageKey from 'api/browser/localstorage/get';
import NotFoundImage from 'assets/NotFound';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
import { useCallback } from 'react';
import { useDispatch } from 'react-redux';
import { Dispatch } from 'redux';
import AppActions from 'types/actions';
import { LOGGED_IN } from 'types/actions/app';
import { defaultText } from './constant';
import { Button, Container, Text, TextContainer } from './styles';
function NotFound({ text = defaultText }: Props): JSX.Element {
const dispatch = useDispatch<Dispatch<AppActions>>();
const isLoggedIn = getLocalStorageKey(LOCALSTORAGE.IS_LOGGED_IN);
const onClickHandler = useCallback(() => {
if (isLoggedIn) {
dispatch({
type: LOGGED_IN,
payload: {
isLoggedIn: true,
},
});
}
}, [dispatch, isLoggedIn]);
return (
<Container>
<NotFoundImage />
@@ -14,7 +35,7 @@ function NotFound({ text = defaultText }: Props): JSX.Element {
<Text>Page Not Found</Text>
</TextContainer>
<Button to={ROUTES.APPLICATION} tabIndex={0}>
<Button onClick={onClickHandler} to={ROUTES.APPLICATION} tabIndex={0}>
Return To Services Page
</Button>
</Container>

View File

@@ -53,7 +53,7 @@
display: flex;
align-items: center;
justify-content: space-between;
width: calc(100% - 24px);
width: 100%;
cursor: pointer;
&.filter-disabled {

View File

@@ -8,12 +8,10 @@ import { Button, Checkbox, Input, Skeleton, Typography } from 'antd';
import cx from 'classnames';
import { IQuickFiltersConfig } from 'components/QuickFilters/QuickFilters';
import { OPERATORS } from 'constants/queryBuilder';
import { DEBOUNCE_DELAY } from 'constants/queryBuilderFilterConfig';
import { getOperatorValue } from 'container/QueryBuilder/filters/QueryBuilderSearch/utils';
import { useGetAggregateValues } from 'hooks/queryBuilder/useGetAggregateValues';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import useDebouncedFn from 'hooks/useDebouncedFunction';
import { cloneDeep, isArray, isEmpty, isEqual, isFunction } from 'lodash-es';
import { cloneDeep, isArray, isEmpty, isEqual } from 'lodash-es';
import { ChevronDown, ChevronRight } from 'lucide-react';
import { useMemo, useState } from 'react';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
@@ -36,11 +34,10 @@ function setDefaultValues(
}
interface ICheckboxProps {
filter: IQuickFiltersConfig;
onFilterChange?: (query: Query) => void;
}
export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
const { filter, onFilterChange } = props;
const { filter } = props;
const [searchText, setSearchText] = useState<string>('');
const [isOpen, setIsOpen] = useState<boolean>(filter.defaultOpen);
const [visibleItemsCount, setVisibleItemsCount] = useState<number>(10);
@@ -53,9 +50,9 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
const { data, isLoading } = useGetAggregateValues(
{
aggregateOperator: filter.aggregateOperator || 'noop',
dataSource: filter.dataSource || DataSource.LOGS,
aggregateAttribute: filter.aggregateAttribute || '',
aggregateOperator: 'noop',
dataSource: DataSource.LOGS,
aggregateAttribute: '',
attributeKey: filter.attributeKey.key,
filterAttributeKeyDataType: filter.attributeKey.dataType || DataTypes.EMPTY,
tagType: filter.attributeKey.type || '',
@@ -75,11 +72,7 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
);
const currentAttributeKeys = attributeValues.slice(0, visibleItemsCount);
const setSearchTextDebounced = useDebouncedFn((...args) => {
setSearchText(args[0] as string);
}, DEBOUNCE_DELAY);
// derive the state of each filter key here in the renderer itself and keep it in sync with current query
// derive the state of each filter key here in the renderer itself and keep it in sync with staged query
// also we need to keep a note of last focussed query.
// eslint-disable-next-line sonarjs/cognitive-complexity
const currentFilterState = useMemo(() => {
@@ -166,12 +159,7 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
})),
},
};
if (onFilterChange && isFunction(onFilterChange)) {
onFilterChange(preparedQuery);
} else {
redirectWithQueryBuilderData(preparedQuery);
}
redirectWithQueryBuilderData(preparedQuery);
};
const isSomeFilterPresentForCurrentAttribute = currentQuery.builder.queryData?.[
@@ -403,11 +391,7 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
},
};
if (onFilterChange && isFunction(onFilterChange)) {
onFilterChange(finalQuery);
} else {
redirectWithQueryBuilderData(finalQuery);
}
redirectWithQueryBuilderData(finalQuery);
};
return (
@@ -456,7 +440,7 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
<section className="search">
<Input
placeholder="Filter values"
onChange={(e): void => setSearchTextDebounced(e.target.value)}
onChange={(e): void => setSearchText(e.target.value)}
disabled={isFilterDisabled}
/>
</section>
@@ -527,7 +511,3 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
</div>
);
}
CheckboxFilter.defaultProps = {
onFilterChange: null,
};

View File

@@ -15,8 +15,6 @@
display: flex;
align-items: center;
gap: 6px;
width: 100%;
justify-content: flex-start;
.text {
color: var(--bg-vanilla-400);
@@ -52,8 +50,6 @@
display: flex;
align-items: center;
gap: 12px;
width: 100%;
justify-content: flex-end;
.divider-filter {
width: 1px;

View File

@@ -7,10 +7,9 @@ import {
} from '@ant-design/icons';
import { Tooltip, Typography } from 'antd';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { cloneDeep, isFunction } from 'lodash-es';
import { cloneDeep } from 'lodash-es';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import Checkbox from './FilterRenderers/Checkbox/Checkbox';
import Slider from './FilterRenderers/Slider/Slider';
@@ -34,9 +33,6 @@ export interface IQuickFiltersConfig {
type: FiltersType;
title: string;
attributeKey: BaseAutocompleteData;
aggregateOperator?: string;
aggregateAttribute?: string;
dataSource?: DataSource;
customRendererForValue?: (value: string) => JSX.Element;
defaultOpen: boolean;
}
@@ -44,12 +40,10 @@ export interface IQuickFiltersConfig {
interface IQuickFiltersProps {
config: IQuickFiltersConfig[];
handleFilterVisibilityChange: () => void;
source?: string | null;
onFilterChange?: (query: Query) => void;
}
export default function QuickFilters(props: IQuickFiltersProps): JSX.Element {
const { config, handleFilterVisibilityChange, source, onFilterChange } = props;
const { config, handleFilterVisibilityChange } = props;
const {
currentQuery,
@@ -84,63 +78,47 @@ export default function QuickFilters(props: IQuickFiltersProps): JSX.Element {
})),
},
};
if (onFilterChange && isFunction(onFilterChange)) {
onFilterChange(preparedQuery);
} else {
redirectWithQueryBuilderData(preparedQuery);
}
redirectWithQueryBuilderData(preparedQuery);
};
const lastQueryName =
currentQuery.builder.queryData?.[lastUsedQuery || 0]?.queryName;
const isInfraMonitoring = source === 'infra-monitoring';
return (
<div className="quick-filters">
{!isInfraMonitoring && (
<section className="header">
<section className="left-actions">
<FilterOutlined />
<Typography.Text className="text">Filters for</Typography.Text>
<Tooltip title={`Filter currently in sync with query ${lastQueryName}`}>
<Typography.Text className="sync-tag">{lastQueryName}</Typography.Text>
</Tooltip>
</section>
<section className="right-actions">
<Tooltip title="Reset All">
<SyncOutlined className="sync-icon" onClick={handleReset} />
</Tooltip>
<div className="divider-filter" />
<Tooltip title="Collapse Filters">
<VerticalAlignTopOutlined
rotate={270}
onClick={handleFilterVisibilityChange}
/>
</Tooltip>
</section>
<section className="header">
<section className="left-actions">
<FilterOutlined />
<Typography.Text className="text">Filters for</Typography.Text>
<Tooltip title={`Filter currently in sync with query ${lastQueryName}`}>
<Typography.Text className="sync-tag">{lastQueryName}</Typography.Text>
</Tooltip>
</section>
)}
<section className="right-actions">
<Tooltip title="Reset All">
<SyncOutlined className="sync-icon" onClick={handleReset} />
</Tooltip>
<div className="divider-filter" />
<Tooltip title="Collapse Filters">
<VerticalAlignTopOutlined
rotate={270}
onClick={handleFilterVisibilityChange}
/>
</Tooltip>
</section>
</section>
<section className="filters">
{config.map((filter) => {
switch (filter.type) {
case FiltersType.CHECKBOX:
return <Checkbox filter={filter} onFilterChange={onFilterChange} />;
return <Checkbox filter={filter} />;
case FiltersType.SLIDER:
return <Slider filter={filter} />;
default:
return <Checkbox filter={filter} onFilterChange={onFilterChange} />;
return <Checkbox filter={filter} />;
}
})}
</section>
</div>
);
}
QuickFilters.defaultProps = {
source: null,
onFilterChange: null,
};

View File

@@ -1,28 +1,40 @@
import { Button, Space } from 'antd';
import setFlags from 'api/user/setFlags';
import MessageTip from 'components/MessageTip';
import { useAppContext } from 'providers/App/App';
import { useCallback } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { Dispatch } from 'redux';
import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import { UPDATE_USER_FLAG } from 'types/actions/app';
import { UserFlags } from 'types/api/user/setFlags';
import AppReducer from 'types/reducer/app';
import ReleaseNoteProps from '../ReleaseNoteProps';
export default function ReleaseNote0120({
release,
}: ReleaseNoteProps): JSX.Element | null {
const { user, setUserFlags } = useAppContext();
const { user } = useSelector<AppState, AppReducer>((state) => state.app);
const dispatch = useDispatch<Dispatch<AppActions>>();
const handleDontShow = useCallback(async (): Promise<void> => {
const flags: UserFlags = { ReleaseNote0120Hide: 'Y' };
try {
setUserFlags(flags);
dispatch({
type: UPDATE_USER_FLAG,
payload: {
flags,
},
});
if (!user) {
// no user is set, so escape the routine
return;
}
const response = await setFlags({ userId: user.id, flags });
const response = await setFlags({ userId: user?.userId, flags });
if (response.statusCode !== 200) {
console.log('failed to complete do not show status', response.error);
@@ -32,7 +44,7 @@ export default function ReleaseNote0120({
// the user can toggle the do-not-show option again in the future.
console.log('unexpected error: failed to complete do not show status', e);
}
}, [setUserFlags, user]);
}, [dispatch, user]);
return (
<MessageTip

View File

@@ -1,7 +1,6 @@
import ReleaseNoteProps from 'components/ReleaseNote/ReleaseNoteProps';
import ReleaseNote0120 from 'components/ReleaseNote/Releases/ReleaseNote0120';
import ROUTES from 'constants/routes';
import { useAppContext } from 'providers/App/App';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { UserFlags } from 'types/api/user/setFlags';
@@ -45,13 +44,12 @@ const allComponentMap: ComponentMapType[] = [
// ReleaseNote prints release specific warnings and notes that
// user needs to be aware of before using the upgraded version.
function ReleaseNote({ path }: ReleaseNoteProps): JSX.Element | null {
const { user } = useAppContext();
const { currentVersion } = useSelector<AppState, AppReducer>(
const { userFlags, currentVersion } = useSelector<AppState, AppReducer>(
(state) => state.app,
);
const c = allComponentMap.find((item) =>
item.match(path, currentVersion, user.flags),
item.match(path, currentVersion, userFlags),
);
if (!c) {

View File

@@ -1,13 +1,11 @@
import { Typography } from 'antd';
import { useTimezone } from 'providers/Timezone';
import convertDateToAmAndPm from 'lib/convertDateToAmAndPm';
import getFormattedDate from 'lib/getFormatedDate';
function Time({ CreatedOrUpdateTime }: DateProps): JSX.Element {
const { formatTimezoneAdjustedTimestamp } = useTimezone();
const time = new Date(CreatedOrUpdateTime);
const timeString = formatTimezoneAdjustedTimestamp(
time,
'MM/DD/YYYY hh:mm:ss A (UTC Z)',
);
const date = getFormattedDate(time);
const timeString = `${date} ${convertDateToAmAndPm(time)}`;
return <Typography>{timeString}</Typography>;
}

View File

@@ -8,7 +8,7 @@ function TabLabel({
isDisabled,
tooltipText,
}: TabLabelProps): JSX.Element {
const currentLabel = <span data-testid={`${label}`}>{label}</span>;
const currentLabel = <span>{label}</span>;
if (isDisabled) {
return (

View File

@@ -1,55 +0,0 @@
.div-table {
border: 1px solid lightgray;
width: fit-content;
}
.div-tr {
display: flex;
width: fit-content;
height: 30px;
}
.div-th,
.div-td {
box-shadow: inset 0 0 0 1px lightgray;
padding: 0.25rem;
}
.div-th {
padding: 2px 4px;
position: relative;
font-weight: bold;
text-align: center;
height: 30px;
}
.div-td {
height: 30px;
}
.resizer {
position: absolute;
top: 0;
height: 100%;
right: 0;
width: 5px;
background: rgba(0, 0, 0, 0.5);
cursor: col-resize;
user-select: none;
touch-action: none;
}
.resizer.isResizing {
background: blue;
opacity: 1;
}
@media (hover: hover) {
.resizer {
opacity: 0;
}
*:hover > .resizer {
opacity: 1;
}
}

View File

@@ -1,135 +0,0 @@
import './TableV3.styles.scss';
import {
ColumnDef,
flexRender,
getCoreRowModel,
Table,
useReactTable,
} from '@tanstack/react-table';
import React, { useMemo } from 'react';
// here we are manually rendering the table body so that we can memoize it for performant re-renders
function TableBody<T>({ table }: { table: Table<T> }): JSX.Element {
return (
<div className="div-tbody">
{table.getRowModel().rows.map((row) => (
<div key={row.id} className="div-tr">
{row.getVisibleCells().map((cell) => (
<div
key={cell.id}
className="div-td"
// we are manually setting the column width here based on the calculated column vars
style={{
width: `calc(var(--col-${cell.column.id}-size) * 1px)`,
}}
>
{cell.renderValue<any>()}
</div>
))}
</div>
))}
</div>
);
}
// memoize the table body based on the data object being passed to the table
const MemoizedTableBody = React.memo(
TableBody,
(prev, next) => prev.table.options.data === next.table.options.data,
) as typeof TableBody;
interface ITableConfig {
defaultColumnMinSize: number;
defaultColumnMaxSize: number;
}
interface ITableV3Props<T> {
columns: ColumnDef<T, any>[];
data: T[];
config: ITableConfig;
}
export function TableV3<T>(props: ITableV3Props<T>): JSX.Element {
const { data, columns, config } = props;
const table = useReactTable({
data,
columns,
defaultColumn: {
minSize: config.defaultColumnMinSize,
maxSize: config.defaultColumnMaxSize,
},
columnResizeMode: 'onChange',
getCoreRowModel: getCoreRowModel(),
debugTable: true,
debugHeaders: true,
debugColumns: true,
});
/**
* Instead of calling `column.getSize()` on every render for every header
* and especially every data cell (very expensive),
* we will calculate all column sizes at once at the root table level in a useMemo
* and pass the column sizes down as CSS variables to the <table> element.
*/
const columnSizeVars = useMemo(() => {
const headers = table.getFlatHeaders();
const colSizes: { [key: string]: number } = {};
for (let i = 0; i < headers.length; i++) {
const header = headers[i]!;
colSizes[`--header-${header.id}-size`] = header.getSize();
colSizes[`--col-${header.column.id}-size`] = header.column.getSize();
}
return colSizes;
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [table.getState().columnSizingInfo, table.getState().columnSizing]);
return (
<div className="p-2">
{/* Here in the <table> equivalent element (surrounds all table head and data cells), we will define our CSS variables for column sizes */}
<div
className="div-table"
style={{
...columnSizeVars, // Define column sizes on the <table> element
width: table.getTotalSize(),
}}
>
<div className="div-thead">
{table.getHeaderGroups().map((headerGroup) => (
<div key={headerGroup.id} className="div-tr">
{headerGroup.headers.map((header) => (
<div
key={header.id}
className="div-th"
style={{
width: `calc(var(--header-${header?.id}-size) * 1px)`,
}}
>
{header.isPlaceholder
? null
: flexRender(header.column.columnDef.header, header.getContext())}
<div
{...{
onDoubleClick: (): void => header.column.resetSize(),
onMouseDown: header.getResizeHandler(),
onTouchStart: header.getResizeHandler(),
className: `resizer ${
header.column.getIsResizing() ? 'isResizing' : ''
}`,
}}
/>
</div>
))}
</div>
))}
</div>
{/* When resizing any column we will render this special memoized version of our table body */}
{table.getState().columnSizingInfo.isResizingColumn ? (
<MemoizedTableBody table={table} />
) : (
<TableBody table={table} />
)}
</div>
</div>
);
}
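A usage sketch for the TableV3 component shown above. The import path, the Row shape and the column definitions are assumptions for illustration; the config values mirror ITableConfig.
import { createColumnHelper } from '@tanstack/react-table';
import { TableV3 } from 'components/TableV3/TableV3'; // assumed path

interface Row {
  service: string;
  p99: number;
}

const helper = createColumnHelper<Row>();
// column sizes are resolved through the CSS variables computed in columnSizeVars
const columns = [
  helper.accessor('service', { header: 'Service' }),
  helper.accessor('p99', { header: 'P99 (ms)' }),
];

function ExampleTable({ rows }: { rows: Row[] }): JSX.Element {
  return (
    <TableV3
      columns={columns}
      data={rows}
      config={{ defaultColumnMinSize: 80, defaultColumnMaxSize: 400 }}
    />
  );
}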

View File

@@ -1,4 +0,0 @@
.timeline-v2-container {
flex: 1;
overflow: visible;
}

View File

@@ -1,90 +0,0 @@
import './TimelineV2.styles.scss';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useEffect, useState } from 'react';
import { useMeasure } from 'react-use';
import {
getIntervals,
getMinimumIntervalsBasedOnWidth,
Interval,
} from './utils';
interface ITimelineV2Props {
startTimestamp: number;
endTimestamp: number;
timelineHeight: number;
}
function TimelineV2(props: ITimelineV2Props): JSX.Element {
const { startTimestamp, endTimestamp, timelineHeight } = props;
const [intervals, setIntervals] = useState<Interval[]>([]);
const [ref, { width }] = useMeasure<HTMLDivElement>();
const isDarkMode = useIsDarkMode();
useEffect(() => {
const spread = endTimestamp - startTimestamp;
if (spread < 0) {
return;
}
const minIntervals = getMinimumIntervalsBasedOnWidth(width);
const intervalisedSpread = (spread / minIntervals) * 1.0;
setIntervals(getIntervals(intervalisedSpread, spread));
}, [startTimestamp, endTimestamp, width]);
if (endTimestamp < startTimestamp) {
console.error(
'endTimestamp cannot be less than startTimestamp',
startTimestamp,
endTimestamp,
);
return <div />;
}
return (
<div ref={ref as never} className="timeline-v2-container">
<svg
width={width}
height={timelineHeight}
xmlns="http://www.w3.org/2000/svg"
overflow="visible"
>
<line
x1="0"
y1={timelineHeight}
x2={width}
y2={timelineHeight}
stroke={isDarkMode ? 'white' : 'black'}
strokeWidth="1"
/>
{intervals &&
intervals.length > 0 &&
intervals.map((interval, index) => (
<g
transform={`translate(${(interval.percentage * width) / 100},0)`}
key={`${interval.percentage + interval.label + index}`}
textAnchor="middle"
fontSize="0.6rem"
>
<text
x={index === intervals.length - 1 ? -10 : 0}
y={2 * Math.floor(timelineHeight / 4)}
fill={isDarkMode ? 'white' : 'black'}
>
{interval.label}
</text>
<line
y1={3 * Math.floor(timelineHeight / 4)}
y2={timelineHeight + 0.5}
stroke={isDarkMode ? 'white' : 'black'}
strokeWidth="1"
/>
</g>
))}
</svg>
</div>
);
}
export default TimelineV2;

View File

@@ -1,118 +0,0 @@
import { toFixed } from 'utils/toFixed';
type TTimeUnitName = 'ms' | 's' | 'm' | 'hr' | 'day' | 'week';
export interface IIntervalUnit {
name: TTimeUnitName;
multiplier: number;
}
export interface Interval {
label: string;
percentage: number;
}
export const INTERVAL_UNITS: IIntervalUnit[] = [
{
name: 'ms',
multiplier: 1,
},
{
name: 's',
multiplier: 1 / 1e3,
},
{
name: 'm',
multiplier: 1 / (1e3 * 60),
},
{
name: 'hr',
multiplier: 1 / (1e3 * 60 * 60),
},
{
name: 'day',
multiplier: 1 / (1e3 * 60 * 60 * 24),
},
{
name: 'week',
multiplier: 1 / (1e3 * 60 * 60 * 24 * 7),
},
];
export const getMinimumIntervalsBasedOnWidth = (width: number): number => {
// S
if (width < 640) {
return 5;
}
// M
if (width < 768) {
return 6;
}
// L
if (width < 1024) {
return 8;
}
return 10;
};
export const resolveTimeFromInterval = (
intervalTime: number,
intervalUnit: IIntervalUnit,
): number => intervalTime * intervalUnit.multiplier;
export function getIntervals(
intervalSpread: number,
baseSpread: number,
): Interval[] {
const integerPartString = intervalSpread.toString().split('.')[0];
const integerPartLength = integerPartString.length;
const intervalSpreadNormalized =
intervalSpread < 1.0
? intervalSpread
: Math.floor(Number(integerPartString) / 10 ** (integerPartLength - 1)) *
10 ** (integerPartLength - 1);
let intervalUnit = INTERVAL_UNITS[0];
for (let idx = INTERVAL_UNITS.length - 1; idx >= 0; idx -= 1) {
const standardInterval = INTERVAL_UNITS[idx];
if (intervalSpread * standardInterval.multiplier >= 1) {
intervalUnit = INTERVAL_UNITS[idx];
break;
}
}
intervalUnit = intervalUnit || INTERVAL_UNITS[0];
const intervals: Interval[] = [
{
label: `${toFixed(resolveTimeFromInterval(0, intervalUnit), 2)}${
intervalUnit.name
}`,
percentage: 0,
},
];
let tempBaseSpread = baseSpread;
let elapsedIntervals = 0;
while (tempBaseSpread && intervals.length < 20) {
let intervalTime;
if (tempBaseSpread <= 1.5 * intervalSpreadNormalized) {
intervalTime = elapsedIntervals + tempBaseSpread;
tempBaseSpread = 0;
} else {
intervalTime = elapsedIntervals + intervalSpreadNormalized;
tempBaseSpread -= intervalSpreadNormalized;
}
elapsedIntervals = intervalTime;
const interval: Interval = {
label: `${toFixed(resolveTimeFromInterval(intervalTime, intervalUnit), 2)}${
intervalUnit.name
}`,
percentage: (intervalTime / baseSpread) * 100,
};
intervals.push(interval);
}
return intervals;
}
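A small sketch of how TimelineV2.tsx (above) feeds this helper: it divides the visible spread by a width-based minimum interval count and renders each returned entry as a tick. The numbers below are illustrative; the './utils' import matches the one in TimelineV2.tsx.
import { getIntervals, getMinimumIntervalsBasedOnWidth } from './utils';

const startTimestamp = 0;
const endTimestamp = 12_500; // a 12.5s spread, expressed in ms
const width = 900; // measured container width in px

const spread = endTimestamp - startTimestamp;
const minIntervals = getMinimumIntervalsBasedOnWidth(width); // 8 for widths under 1024px
const intervals = getIntervals((spread / minIntervals) * 1.0, spread);
// Each entry pairs a time label (seconds here, since the interval spread
// exceeds 1s) with a percentage used to position the tick along the SVG width.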

View File

@@ -21,7 +21,4 @@ export enum LOCALSTORAGE {
THEME_ANALYTICS_V1 = 'THEME_ANALYTICS_V1',
LAST_USED_SAVED_VIEWS = 'LAST_USED_SAVED_VIEWS',
SHOW_LOGS_QUICK_FILTERS = 'SHOW_LOGS_QUICK_FILTERS',
USER_ID = 'USER_ID',
PREFERRED_TIMEZONE = 'PREFERRED_TIMEZONE',
UNAUTHENTICATED_ROUTE_HIT = 'UNAUTHENTICATED_ROUTE_HIT',
}

View File

@@ -21,5 +21,4 @@ export const REACT_QUERY_KEY = {
GET_HOST_LIST: 'GET_HOST_LIST',
UPDATE_ALERT_RULE: 'UPDATE_ALERT_RULE',
GET_ACTIVE_LICENSE_V3: 'GET_ACTIVE_LICENSE_V3',
GET_POD_LIST: 'GET_POD_LIST',
};

View File

@@ -34,8 +34,7 @@ const ROUTES = {
MY_SETTINGS: '/my-settings',
SETTINGS: '/settings',
ORG_SETTINGS: '/settings/org-settings',
CUSTOM_DOMAIN_SETTINGS: '/settings/custom-domain-settings',
API_KEYS: '/settings/api-keys',
API_KEYS: '/settings/access-tokens',
INGESTION_SETTINGS: '/settings/ingestion-settings',
SOMETHING_WENT_WRONG: '/something-went-wrong',
UN_AUTHORIZED: '/un-authorized',
@@ -62,7 +61,6 @@ const ROUTES = {
MESSAGING_QUEUES: '/messaging-queues',
MESSAGING_QUEUES_DETAIL: '/messaging-queues/detail',
INFRASTRUCTURE_MONITORING_HOSTS: '/infrastructure-monitoring/hosts',
INFRASTRUCTURE_MONITORING_KUBERNETES: '/infrastructure-monitoring/kubernetes',
} as const;
export default ROUTES;

View File

@@ -1,3 +0,0 @@
export const TimezonePickerShortcuts = {
CloseTimezonePicker: 'escape',
};

View File

@@ -26,9 +26,9 @@ describe('APIKeys component', () => {
});
it('renders APIKeys component without crashing', () => {
expect(screen.getByText('API Keys')).toBeInTheDocument();
expect(screen.getByText('Access Tokens')).toBeInTheDocument();
expect(
screen.getByText('Create and manage API keys for the SigNoz API'),
screen.getByText('Create and manage access tokens for the SigNoz API'),
).toBeInTheDocument();
});
@@ -40,16 +40,16 @@ describe('APIKeys component', () => {
);
await waitFor(() => {
expect(screen.getByText('No Expiry Key')).toBeInTheDocument();
expect(screen.getByText('1-5 of 18 keys')).toBeInTheDocument();
expect(screen.getByText('No Expiry Token')).toBeInTheDocument();
expect(screen.getByText('1-5 of 18 tokens')).toBeInTheDocument();
});
});
it('opens add new key modal on button click', async () => {
fireEvent.click(screen.getByText('New Key'));
fireEvent.click(screen.getByText('New Token'));
await waitFor(() => {
const createNewKeyBtn = screen.getByRole('button', {
name: /Create new key/i,
name: /Create new token/i,
});
expect(createNewKeyBtn).toBeInTheDocument();
@@ -57,10 +57,10 @@ describe('APIKeys component', () => {
});
it('closes add new key modal on cancel button click', async () => {
fireEvent.click(screen.getByText('New Key'));
fireEvent.click(screen.getByText('New Token'));
const createNewKeyBtn = screen.getByRole('button', {
name: /Create new key/i,
name: /Create new token/i,
});
await waitFor(() => {
@@ -79,10 +79,10 @@ describe('APIKeys component', () => {
),
);
fireEvent.click(screen.getByText('New Key'));
fireEvent.click(screen.getByText('New Token'));
const createNewKeyBtn = screen.getByRole('button', {
name: /Create new key/i,
name: /Create new token/i,
});
await waitFor(() => {
@@ -90,7 +90,7 @@ describe('APIKeys component', () => {
});
act(() => {
const inputElement = screen.getByPlaceholderText('Enter Key Name');
const inputElement = screen.getByPlaceholderText('Enter Token Name');
fireEvent.change(inputElement, { target: { value: 'Top Secret' } });
fireEvent.click(screen.getByTestId('create-form-admin-role-btn'));
fireEvent.click(createNewKeyBtn);

View File

@@ -44,12 +44,14 @@ import {
View,
X,
} from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { ChangeEvent, useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useMutation } from 'react-query';
import { useSelector } from 'react-redux';
import { useCopyToClipboard } from 'react-use';
import { AppState } from 'store/reducers';
import { APIKeyProps } from 'types/api/pat/types';
import AppReducer from 'types/reducer/app';
import { USER_ROLES } from 'types/roles';
export const showErrorNotification = (
@@ -97,7 +99,7 @@ export const getDateDifference = (
};
function APIKeys(): JSX.Element {
const { user } = useAppContext();
const { user } = useSelector<AppState, AppReducer>((state) => state.app);
const { notifications } = useNotifications();
const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false);
const [isAddModalOpen, setIsAddModalOpen] = useState(false);
@@ -512,15 +514,15 @@ function APIKeys(): JSX.Element {
<div className="api-key-container">
<div className="api-key-content">
<header>
<Typography.Title className="title">API Keys</Typography.Title>
<Typography.Title className="title">Access Tokens </Typography.Title>
<Typography.Text className="subtitle">
Create and manage API keys for the SigNoz API
Create and manage access tokens for the SigNoz API
</Typography.Text>
</header>
<div className="api-keys-search-add-new">
<Input
placeholder="Search for keys..."
placeholder="Search for token..."
prefix={<Search size={12} color={Color.BG_VANILLA_400} />}
value={searchValue}
onChange={handleSearch}
@@ -531,7 +533,7 @@ function APIKeys(): JSX.Element {
type="primary"
onClick={showAddModal}
>
<Plus size={14} /> New Key
<Plus size={14} /> New Token
</Button>
</div>
@@ -544,7 +546,7 @@ function APIKeys(): JSX.Element {
pageSize: 5,
hideOnSinglePage: true,
showTotal: (total: number, range: number[]): string =>
`${range[0]}-${range[1]} of ${total} keys`,
`${range[0]}-${range[1]} of ${total} tokens`,
}}
/>
</div>
@@ -552,7 +554,7 @@ function APIKeys(): JSX.Element {
{/* Delete Key Modal */}
<Modal
className="delete-api-key-modal"
title={<span className="title">Delete Key</span>}
title={<span className="title">Delete Token</span>}
open={isDeleteModalOpen}
closable
afterClose={handleModalClose}
@@ -574,7 +576,7 @@ function APIKeys(): JSX.Element {
onClick={onDeleteHandler}
className="delete-btn"
>
Delete key
Delete Token
</Button>,
]}
>
@@ -588,7 +590,7 @@ function APIKeys(): JSX.Element {
{/* Edit Key Modal */}
<Modal
className="api-key-modal"
title="Edit key"
title="Edit token"
open={isEditModalOpen}
key="edit-api-key-modal"
afterClose={handleModalClose}
@@ -612,7 +614,7 @@ function APIKeys(): JSX.Element {
icon={<Check size={14} />}
onClick={onUpdateApiKey}
>
Update key
Update Token
</Button>,
]}
>
@@ -632,7 +634,7 @@ function APIKeys(): JSX.Element {
label="Name"
rules={[{ required: true }, { type: 'string', min: 6 }]}
>
<Input placeholder="Enter Key Name" autoFocus />
<Input placeholder="Enter Token Name" autoFocus />
</Form.Item>
<Form.Item name="role" label="Role">
@@ -666,7 +668,7 @@ function APIKeys(): JSX.Element {
{/* Create New Key Modal */}
<Modal
className="api-key-modal"
title="Create new key"
title="Create new token"
open={isAddModalOpen}
key="create-api-key-modal"
closable
@@ -683,7 +685,7 @@ function APIKeys(): JSX.Element {
onClick={handleCopyClose}
icon={<Check size={12} />}
>
Copy key and close
Copy token and close
</Button>,
]
: [
@@ -704,7 +706,7 @@ function APIKeys(): JSX.Element {
loading={isLoadingCreateAPIKey}
onClick={onCreateAPIKey}
>
Create new key
Create new token
</Button>,
]
}
@@ -728,7 +730,7 @@ function APIKeys(): JSX.Element {
rules={[{ required: true }, { type: 'string', min: 6 }]}
validateTrigger="onFinish"
>
<Input placeholder="Enter Key Name" autoFocus />
<Input placeholder="Enter Token Name" autoFocus />
</Form.Item>
<Form.Item name="role" label="Role">
@@ -769,7 +771,7 @@ function APIKeys(): JSX.Element {
{showNewAPIKeyDetails && (
<div className="api-key-info-container">
<Row>
<Col span={8}>Key</Col>
<Col span={8}>Token</Col>
<Col span={16}>
<span className="copyable-text">
<Typography.Text>
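
Besides the key→token copy updates, this hunk swaps where the current user comes from: the app context hook is replaced by a typed Redux selector on the app reducer. A minimal sketch of that selector pattern follows, with the state shape reduced to just what the example needs — the real AppState and AppReducer types carry more fields.

import { useSelector } from 'react-redux';

// Simplified stand-ins for the real AppState / AppReducer types.
interface SketchUser {
  name: string;
  role: 'ADMIN' | 'EDITOR' | 'VIEWER';
}

interface SketchAppState {
  app: { user: SketchUser | null };
}

function useCurrentUser(): SketchUser | null {
  // Typed selector: first type param is the store state, second the selected slice.
  return useSelector<SketchAppState, SketchUser | null>(
    (state) => state.app.user,
  );
}

// Usage inside a component: gate admin-only actions on the selected user's role.
// const user = useCurrentUser();
// const canManageTokens = user?.role === 'ADMIN';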

View File

@@ -7,7 +7,6 @@ import useUrlQuery from 'hooks/useUrlQuery';
import history from 'lib/history';
import heatmapPlugin from 'lib/uPlotLib/plugins/heatmapPlugin';
import timelinePlugin from 'lib/uPlotLib/plugins/timelinePlugin';
import { useTimezone } from 'providers/Timezone';
import { useMemo, useRef } from 'react';
import { useDispatch } from 'react-redux';
import { UpdateTimeInterval } from 'store/actions';
@@ -49,7 +48,6 @@ function HorizontalTimelineGraph({
const urlQuery = useUrlQuery();
const dispatch = useDispatch();
const { timezone } = useTimezone();
const options: uPlot.Options = useMemo(
() => ({
@@ -118,18 +116,8 @@ function HorizontalTimelineGraph({
}),
]
: [],
tzDate: (timestamp: number): Date =>
uPlot.tzDate(new Date(timestamp * 1e3), timezone.value),
}),
[
width,
isDarkMode,
transformedData.length,
urlQuery,
dispatch,
timezone.value,
],
[width, isDarkMode, transformedData.length, urlQuery, dispatch],
);
return <Uplot data={transformedData} options={options} />;
}
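
For reference, the line removed here is uPlot's hook for timezone-aware axis rendering: without a tzDate option, uPlot builds axis dates in the browser's local timezone. Below is a minimal sketch of the option as it is used above; the chart dimensions and series config are placeholders, not the component's real setup.

import uPlot from 'uplot';

// Sketch only: width/height and series are placeholders.
const timezone = 'America/New_York'; // in the component this comes from the Timezone provider

const options: uPlot.Options = {
  width: 800,
  height: 120,
  series: [{}, { label: 'state' }],
  // Timestamps arrive as unix seconds; convert to ms and let uPlot
  // build Date objects in the selected timezone instead of the browser's.
  tzDate: (timestamp: number): Date =>
    uPlot.tzDate(new Date(timestamp * 1e3), timezone),
};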

View File

@@ -7,7 +7,6 @@ import {
useGetAlertRuleDetailsTimelineTable,
useTimelineTable,
} from 'pages/AlertDetails/hooks';
import { useTimezone } from 'providers/Timezone';
import { HTMLAttributes, useMemo, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { AlertRuleTimelineTableResponse } from 'types/api/alerts/def';
@@ -42,8 +41,6 @@ function TimelineTable(): JSX.Element {
const { t } = useTranslation('common');
const { formatTimezoneAdjustedTimestamp } = useTimezone();
if (isError || !isValidRuleId || !ruleId) {
return <div>{t('something_went_wrong')}</div>;
}
@@ -67,7 +64,6 @@ function TimelineTable(): JSX.Element {
filters,
labels: labels ?? {},
setFilters,
formatTimezoneAdjustedTimestamp,
})}
onRow={handleRowClick}
dataSource={timelineData}

View File

@@ -8,7 +8,6 @@ import ClientSideQBSearch, {
import { ConditionalAlertPopover } from 'container/AlertHistory/AlertPopover/AlertPopover';
import { transformKeyValuesToAttributeValuesMap } from 'container/QueryBuilder/filters/utils';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { TimestampInput } from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
import { Search } from 'lucide-react';
import AlertLabels, {
AlertLabelsProps,
@@ -17,6 +16,7 @@ import AlertState from 'pages/AlertDetails/AlertHeader/AlertState/AlertState';
import { useMemo } from 'react';
import { AlertRuleTimelineTableResponse } from 'types/api/alerts/def';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
import { formatEpochTimestamp } from 'utils/timeUtils';
const transformLabelsToQbKeys = (
labels: AlertRuleTimelineTableResponse['labels'],
@@ -74,15 +74,10 @@ export const timelineTableColumns = ({
filters,
labels,
setFilters,
formatTimezoneAdjustedTimestamp,
}: {
filters: TagFilter;
labels: AlertLabelsProps['labels'];
setFilters: (filters: TagFilter) => void;
formatTimezoneAdjustedTimestamp: (
input: TimestampInput,
format?: string,
) => string;
}): ColumnsType<AlertRuleTimelineTableResponse> => [
{
title: 'STATE',
@@ -111,9 +106,7 @@ export const timelineTableColumns = ({
dataIndex: 'unixMilli',
width: 200,
render: (value): JSX.Element => (
<div className="alert-rule__created-at">
{formatTimezoneAdjustedTimestamp(value, 'MMM D, YYYY ⎯ HH:mm:ss')}
</div>
<div className="alert-rule__created-at">{formatEpochTimestamp(value)}</div>
),
},
{

Some files were not shown because too many files have changed in this diff.