Compare commits

..

21 Commits

Author SHA1 Message Date
eKuG
b6445bc0ad feat: added support for custom quick filters 2025-04-21 12:29:28 +05:30
eKuG
ce81ab73b1 feat: added support for custom quick filters 2025-04-21 12:28:36 +05:30
eKuG
20f7615a80 feat: added support for custom quick filters 2025-04-21 11:59:57 +05:30
eKuG
9777b020c5 feat: added support for custom quick filters 2025-04-21 11:58:54 +05:30
eKuG
be72e2ea1d feat: added support for custom quick filters 2025-04-21 11:37:57 +05:30
Ekansh Gupta
38a5a21ff0 Merge branch 'main' into quickFiltersFeature 2025-04-17 16:25:44 +05:30
eKuG
ca6f90926c feat: added support for custom quick filters 2025-04-17 16:18:02 +05:30
Srikanth Chekuri
b0d19035a4 chore: add where clause visitor implementation for query expression (#7564) 2025-04-17 15:54:36 +05:30
Nityananda Gohain
054dea366e fix: proper formatting of floating point (#7653)
* fix: proper formatting of floating point

* fix: old trace

* fix: add tests

* fix: use strconv.FormatFloat instead
2025-04-17 06:02:41 +00:00
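The commit above switches float rendering to strconv.FormatFloat. A minimal standalone sketch (illustrative only, not the repository's code) of the difference that fix targets: Go's default `%v` formatting falls back to exponent notation for large float64 values, while FormatFloat with the `'f'` verb keeps the plain decimal form.

```go
package main

import (
	"fmt"
	"strconv"
)

func main() {
	v := 1234567890.123

	// Default formatting uses %g under the hood and switches to
	// exponent notation once the magnitude is large enough.
	fmt.Println(fmt.Sprintf("%v", v)) // 1.234567890123e+09

	// FormatFloat with 'f' and precision -1 keeps the shortest
	// plain-decimal representation that round-trips.
	fmt.Println(strconv.FormatFloat(v, 'f', -1, 64)) // 1234567890.123
}
```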
primus-bot[bot]
aaf0b597dc chore(release): bump to v0.79.1 (#7655)
#### Summary
 - Release SigNoz v0.79.1

 Created by [Primus-Bot](https://github.com/apps/primus-bot)

Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
2025-04-17 00:02:05 +05:30
Prashant Shahi
19372c8194 ci(build): use unique cache key for the internal/public builds (#7654)
### Summary

- unique cache keys for the internal/public builds

Signed-off-by: Prashant Shahi <prashant@signoz.io>
2025-04-16 23:37:05 +05:30
Vibhu Pandey
eb74adad44 test(integration): set the base for integration tests (#7606)
* test(integration): set the base for integration tests

* ci: add ci pipeline for integration test
2025-04-16 18:54:05 +05:30
Srikanth Chekuri
d5c04e1342 chore: log original query failed to transform (#7641) 2025-04-16 14:40:54 +05:30
primus-bot[bot]
2b9632c8fd chore(release): bump to v0.79.0 (#7643)
#### Summary
 - Release SigNoz v0.79.0
 - Bump SigNoz OTel Collector to v0.111.39

 Created by [Primus-Bot](https://github.com/apps/primus-bot)

Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
2025-04-16 13:36:31 +05:30
Prashant Shahi
24920ae903 chore(prereleaser): update cron schedule - 6:30AM UTC (#7640)
### Summary

- update prereleaser cron schedule to 6:30AM UTC

Signed-off-by: Prashant Shahi <prashant@signoz.io>
2025-04-16 07:20:47 +00:00
Prashant Shahi
6f096632a2 chore(build-staging): only include telemetry tunnel FE envs (#7637)
### Summary

- only include telemetry tunnel FE environment variables for the staging build

---------

Signed-off-by: Prashant Shahi <prashant@signoz.io>
2025-04-16 12:38:30 +05:30
Piyush Singariya
a42eacec4b chore: enhancing JSON Parser handling (#7591)
* feat: enhancing JSON Parser handling

* fix: updating collector version

* chore: updating go.mod reference for Collector

---------

Co-authored-by: Nityananda Gohain <nityanandagohain@gmail.com>
2025-04-16 11:24:59 +05:30
Nityananda Gohain
e723399f7f fix: add check for empty services (#7611) 2025-04-15 16:39:33 +00:00
Nityananda Gohain
48936bed9b chore: multitenancy in integrations (#7507)
* chore: multitenancy in integrations

* chore: multitenancy in cloud integration accounts

* chore: changes to cloudintegrationservice

* chore: rename migration

* chore: update scan function

* chore: update scan function

* chore: fix migration

* chore: fix struct

* chore: remove unwanted code

* chore: update scan function

* chore: migrate user and pat for integrations

* fix: changes to the user for integrations

* fix: address comments

* fix: copy created_at

* fix: update non revoked token

* chore: don't allow deleting pat and user for integrations

* fix: address comments

* chore: address comments

* chore: add checks for fk in dialect

* fix: service migration

* fix: don't update user if user is already migrated

* fix: update correct service config

* fix: remove unwanted code

* fix: remove migration for multiple same services which is not required

* fix: fix migration and disable dashboard if metrics disabled

* fix: don't use ee types

---------

Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2025-04-15 15:35:36 +00:00
Srikanth Chekuri
ee70474cc7 fix: missing receivers in json payload for legacy postableAlert (#7603) 2025-04-14 13:20:39 +00:00
Srikanth Chekuri
c3fa7144ee chore: add tag type filter support in attribute keys (#7522) 2025-04-14 18:43:15 +05:30
127 changed files with 5867 additions and 1342 deletions

View File

@@ -1,6 +1,7 @@
.git
.github
.vscode
.devenv
README.md
deploy
sample-apps

View File

@@ -73,7 +73,7 @@ jobs:
uses: actions/cache@v4
with:
path: frontend/.env
key: dotenv-${{ github.sha }}
key: enterprise-dotenv-${{ github.sha }}
js-build:
uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
needs: prepare
@@ -81,10 +81,10 @@ jobs:
with:
PRIMUS_REF: main
JS_SRC: frontend
JS_INPUT_ARTIFACT_CACHE_KEY: dotenv-${{ github.sha }}
JS_INPUT_ARTIFACT_CACHE_KEY: enterprise-dotenv-${{ github.sha }}
JS_INPUT_ARTIFACT_PATH: frontend/.env
JS_OUTPUT_ARTIFACT_CACHE_KEY: jsbuild-${{ github.sha }}
JS_OUTPUT_ARTIFACT_PATH: frontend/build
JS_OUTPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
JS_OUTPUT_ARTIFACT_PATH: frontend/build
DOCKER_BUILD: false
DOCKER_MANIFEST: false
go-build:
@@ -93,7 +93,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_INPUT_ARTIFACT_CACHE_KEY: jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./ee/query-service
GO_BUILD_FLAGS: >-

View File

@@ -64,22 +64,13 @@ jobs:
run: |
mkdir -p frontend
echo 'CI=1' > frontend/.env
echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' >> frontend/.env
echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
echo 'TUNNEL_URL=https://telemetry.staging.signoz.cloud/tunnel' >> frontend/.env
echo 'TUNNEL_DOMAIN=https://telemetry.staging.signoz.cloud' >> frontend/.env
- name: cache-dotenv
uses: actions/cache@v4
with:
path: frontend/.env
key: dotenv-${{ github.sha }}
key: staging-dotenv-${{ github.sha }}
js-build:
uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
needs: prepare
@@ -87,9 +78,9 @@ jobs:
with:
PRIMUS_REF: main
JS_SRC: frontend
JS_INPUT_ARTIFACT_CACHE_KEY: dotenv-${{ github.sha }}
JS_INPUT_ARTIFACT_CACHE_KEY: staging-dotenv-${{ github.sha }}
JS_INPUT_ARTIFACT_PATH: frontend/.env
JS_OUTPUT_ARTIFACT_CACHE_KEY: jsbuild-${{ github.sha }}
JS_OUTPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
JS_OUTPUT_ARTIFACT_PATH: frontend/build
DOCKER_BUILD: false
DOCKER_MANIFEST: false
@@ -99,7 +90,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_INPUT_ARTIFACT_CACHE_KEY: jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./ee/query-service
GO_BUILD_FLAGS: >-

55
.github/workflows/integrationci.yaml vendored Normal file
View File

@@ -0,0 +1,55 @@
name: integrationci
on:
pull_request:
types:
- labeled
pull_request_target:
types:
- labeled
jobs:
test:
strategy:
fail-fast: false
matrix:
src:
- bootstrap
sqlstore-provider:
- postgres
- sqlite
clickhouse-version:
- 24.1.2-alpine
- 24.12-alpine
schema-migrator-version:
- v0.111.38
postgres-version:
- 15
if: |
((github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))) && contains(github.event.pull_request.labels.*.name, 'safe-to-integrate')
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@v4
- name: python
uses: actions/setup-python@v5
with:
python-version: 3.13
- name: poetry
run: |
python -m pip install poetry==2.1.2
python -m poetry config virtualenvs.in-project true
cd tests/integration && poetry install --no-root
- name: run
run: |
cd tests/integration && \
poetry run pytest -ra \
--basetemp=./tmp/ \
-vv \
--capture=no \
src/${{matrix.src}} \
--sqlstore-provider ${{matrix.sqlstore-provider}} \
--postgres-version ${{matrix.postgres-version}} \
--clickhouse-version ${{matrix.clickhouse-version}} \
--schema-migrator-version ${{matrix.schema-migrator-version}}

View File

@@ -1,9 +1,9 @@
name: prereleaser
on:
# schedule every wednesday 9:30 AM UTC (3pm IST)
# schedule every wednesday 6:30 AM UTC (12:00 PM IST)
schedule:
- cron: '30 9 * * 3'
- cron: '30 6 * * 3'
# allow manual triggering of the workflow by a maintainer
workflow_dispatch:

147
.gitignore vendored
View File

@@ -80,6 +80,153 @@ deploy/common/clickhouse/user_scripts/
queries.active
# tmp
**/tmp/**
# .devenv tmp files
.devenv/**/tmp/**
.qodo
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
### Python Patch ###
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
poetry.toml
# ruff
.ruff_cache/
# LSP config files
pyrightconfig.json
# End of https://www.toptal.com/developers/gitignore/api/python

View File

@@ -10,7 +10,7 @@ COMMIT_SHORT_SHA ?= $(shell git rev-parse --short HEAD)
BRANCH_NAME ?= $(subst /,-,$(shell git rev-parse --abbrev-ref HEAD))
VERSION ?= $(BRANCH_NAME)-$(COMMIT_SHORT_SHA)
TIMESTAMP ?= $(shell date -u +"%Y-%m-%dT%H:%M:%SZ")
ARCHS = amd64 arm64
ARCHS ?= amd64 arm64
TARGET_DIR ?= $(shell pwd)/target
ZEUS_URL ?= https://api.signoz.cloud
@@ -23,6 +23,7 @@ GO_BUILD_ARCHS_COMMUNITY = $(addprefix go-build-community-,$(ARCHS))
GO_BUILD_CONTEXT_COMMUNITY = $(SRC)/pkg/query-service
GO_BUILD_LDFLAGS_COMMUNITY = $(GO_BUILD_VERSION_LDFLAGS) -X github.com/SigNoz/signoz/pkg/version.variant=community
GO_BUILD_ARCHS_ENTERPRISE = $(addprefix go-build-enterprise-,$(ARCHS))
GO_BUILD_ARCHS_ENTERPRISE_RACE = $(addprefix go-build-enterprise-race-,$(ARCHS))
GO_BUILD_CONTEXT_ENTERPRISE = $(SRC)/ee/query-service
GO_BUILD_LDFLAGS_ENTERPRISE = $(GO_BUILD_VERSION_LDFLAGS) -X github.com/SigNoz/signoz/pkg/version.variant=enterprise $(GO_BUILD_LDFLAG_ZEUS_URL) $(GO_BUILD_LDFLAG_LICENSE_SIGNOZ_IO)
@@ -119,6 +120,18 @@ $(GO_BUILD_ARCHS_ENTERPRISE): go-build-enterprise-%: $(TARGET_DIR)
CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
fi
.PHONY: go-build-enterprise-race $(GO_BUILD_ARCHS_ENTERPRISE_RACE)
go-build-enterprise-race: ## Builds the go backend server for enterprise with race
go-build-enterprise-race: $(GO_BUILD_ARCHS_ENTERPRISE_RACE)
$(GO_BUILD_ARCHS_ENTERPRISE_RACE): go-build-enterprise-race-%: $(TARGET_DIR)
@mkdir -p $(TARGET_DIR)/$(OS)-$*
@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)"
@if [ $* = "arm64" ]; then \
CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
else \
CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
fi
##############################################################
# js commands
##############################################################
@@ -167,3 +180,20 @@ docker-buildx-enterprise: go-build-enterprise js-build
--platform linux/arm64,linux/amd64 \
--push \
--tag $(DOCKER_REGISTRY_ENTERPRISE):$(VERSION) $(SRC)
##############################################################
# python commands
##############################################################
.PHONY: py-fmt
py-fmt: ## Run black for integration tests
@cd tests/integration && poetry run black .
.PHONY: py-lint
py-lint: ## Run lint for integration tests
@cd tests/integration && poetry run isort .
@cd tests/integration && poetry run autoflake .
@cd tests/integration && poetry run pylint .
.PHONY: py-test
py-test: ## Runs integration tests
@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/
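The Makefile hunk above adds go-build-enterprise-race targets that pass `-race` to `go build`. For context, a minimal Go program (illustrative, not from this repository) with the kind of unsynchronized access such a race-instrumented binary reports at runtime:

```go
package main

import "fmt"

func main() {
	counter := 0
	done := make(chan struct{})

	go func() {
		counter++ // write from a goroutine
		close(done)
	}()

	counter++ // concurrent write from main: flagged as a data race under -race
	<-done

	fmt.Println(counter)
}
```

Running a program like this with `go run -race .` prints a "WARNING: DATA RACE" report with the stacks of both conflicting writes, which is what the race-enabled build target and Dockerfile below make available for the enterprise binary.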

View File

@@ -174,7 +174,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.78.1
image: signoz/signoz:v0.79.1
command:
- --config=/root/config/prometheus.yml
- --use-logs-new-schema=true
@@ -208,7 +208,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.38
image: signoz/signoz-otel-collector:v0.111.39
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -232,7 +232,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.38
image: signoz/signoz-schema-migrator:v0.111.39
deploy:
restart_policy:
condition: on-failure

View File

@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.78.1
image: signoz/signoz:v0.79.1
command:
- --config=/root/config/prometheus.yml
- --use-logs-new-schema=true
@@ -143,7 +143,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.38
image: signoz/signoz-otel-collector:v0.111.39
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -167,7 +167,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.38
image: signoz/signoz-schema-migrator:v0.111.39
deploy:
restart_policy:
condition: on-failure

View File

@@ -177,7 +177,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.78.1}
image: signoz/signoz:${VERSION:-v0.79.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -212,7 +212,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.38}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -238,7 +238,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
container_name: schema-migrator-sync
command:
- sync
@@ -249,7 +249,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
container_name: schema-migrator-async
command:
- async

View File

@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.78.1}
image: signoz/signoz:${VERSION:-v0.79.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -146,7 +146,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.38}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -168,7 +168,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
container_name: schema-migrator-sync
command:
- sync
@@ -180,7 +180,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
container_name: schema-migrator-async
command:
- async

View File

@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.78.1}
image: signoz/signoz:${VERSION:-v0.79.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.38}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
container_name: schema-migrator-sync
command:
- sync
@@ -178,7 +178,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
container_name: schema-migrator-async
command:
- async

View File

@@ -0,0 +1,36 @@
FROM golang:1.22-bullseye
ARG OS="linux"
ARG TARGETARCH
ARG ZEUSURL
# This path is important for stacktraces
WORKDIR $GOPATH/src/github.com/signoz/signoz
WORKDIR /root
RUN set -eux; \
apt-get update; \
apt-get install -y --no-install-recommends \
g++ \
gcc \
libc6-dev \
make \
pkg-config \
; \
rm -rf /var/lib/apt/lists/*
COPY go.mod go.sum ./
RUN go mod download
COPY ./ee/ ./ee/
COPY ./pkg/ ./pkg/
COPY ./templates/email /root/templates
COPY Makefile Makefile
RUN TARGET_DIR=/root ARCHS=${TARGETARCH} ZEUS_URL=${ZEUSURL} LICENSE_URL=${ZEUSURL}/api/v1 make go-build-enterprise-race
RUN mv /root/linux-${TARGETARCH}/signoz /root/signoz
RUN chmod 755 /root /root/signoz
ENTRYPOINT ["/root/signoz"]

View File

@@ -153,9 +153,11 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
func (ah *APIHandler) getOrCreateCloudIntegrationUser(
ctx context.Context, orgId string, cloudProvider string,
) (*types.User, *basemodel.ApiError) {
cloudIntegrationUserId := fmt.Sprintf("%s-integration", cloudProvider)
cloudIntegrationUser := fmt.Sprintf("%s-integration", cloudProvider)
email := fmt.Sprintf("%s@signoz.io", cloudIntegrationUser)
integrationUserResult, apiErr := ah.AppDao().GetUser(ctx, cloudIntegrationUserId)
// TODO(nitya): there should be orgId here
integrationUserResult, apiErr := ah.AppDao().GetUserByEmail(ctx, email)
if apiErr != nil {
return nil, basemodel.WrapApiError(apiErr, "couldn't look for integration user")
}
@@ -170,9 +172,9 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
)
newUser := &types.User{
ID: cloudIntegrationUserId,
Name: fmt.Sprintf("%s integration", cloudProvider),
Email: fmt.Sprintf("%s@signoz.io", cloudIntegrationUserId),
ID: uuid.New().String(),
Name: cloudIntegrationUser,
Email: email,
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
},
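The hunk above changes the cloud-integration user lookup to go by a deterministic email and to mint a random UUID only when the user is first created. A self-contained sketch of that get-or-create shape, using illustrative stand-in types rather than the handler's actual interfaces:

```go
package main

import (
	"context"
	"fmt"

	"github.com/google/uuid"
)

// User and UserStore are stand-ins for illustration, not the repository's types.
type User struct {
	ID, Name, Email string
}

type UserStore interface {
	GetUserByEmail(ctx context.Context, email string) (*User, error)
	CreateUser(ctx context.Context, u *User) error
}

// The deterministic email acts as the stable lookup key; the ID is a fresh
// UUID generated only when the user does not exist yet.
func getOrCreateIntegrationUser(ctx context.Context, store UserStore, cloudProvider string) (*User, error) {
	name := fmt.Sprintf("%s-integration", cloudProvider)
	email := fmt.Sprintf("%s@signoz.io", name)

	if existing, err := store.GetUserByEmail(ctx, email); err == nil && existing != nil {
		return existing, nil
	}

	u := &User{ID: uuid.New().String(), Name: name, Email: email}
	return u, store.CreateUser(ctx, u)
}

func main() {} // placeholder so the sketch compiles as a standalone file
```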

View File

@@ -5,16 +5,18 @@ import (
"encoding/json"
"fmt"
"net/http"
"slices"
"time"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/types"
eeTypes "github.com/SigNoz/signoz/ee/types"
"github.com/SigNoz/signoz/pkg/errors"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/query-service/auth"
baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
"go.uber.org/zap"
@@ -58,7 +60,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
ah.Respond(w, &pat)
}
func validatePATRequest(req types.GettablePAT) error {
func validatePATRequest(req eeTypes.GettablePAT) error {
if req.Role == "" || (req.Role != baseconstants.ViewerGroup && req.Role != baseconstants.EditorGroup && req.Role != baseconstants.AdminGroup) {
return fmt.Errorf("valid role is required")
}
@@ -74,12 +76,19 @@ func validatePATRequest(req types.GettablePAT) error {
func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
req := types.GettablePAT{}
req := eeTypes.GettablePAT{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
idStr := mux.Vars(r)["id"]
id, err := valuer.NewUUID(idStr)
if err != nil {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
return
}
user, err := auth.GetUserFromReqContext(r.Context())
if err != nil {
RespondError(w, &model.ApiError{
@@ -89,6 +98,25 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
return
}
//get the pat
existingPAT, paterr := ah.AppDao().GetPATByID(ctx, user.OrgID, id)
if paterr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
return
}
// get the user
createdByUser, usererr := ah.AppDao().GetUser(ctx, existingPAT.UserID)
if usererr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
return
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
return
}
err = validatePATRequest(req)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
@@ -96,12 +124,6 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
}
req.UpdatedByUserID = user.ID
idStr := mux.Vars(r)["id"]
id, err := valuer.NewUUID(idStr)
if err != nil {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
return
}
req.UpdatedAt = time.Now()
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
var apierr basemodel.BaseApiError
@@ -149,6 +171,25 @@ func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
return
}
//get the pat
existingPAT, paterr := ah.AppDao().GetPATByID(ctx, user.OrgID, id)
if paterr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
return
}
// get the user
createdByUser, usererr := ah.AppDao().GetUser(ctx, existingPAT.UserID)
if usererr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
return
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
return
}
zap.L().Info("Revoke PAT with id", zap.String("id", id.StringValue()))
if apierr := ah.AppDao().RevokePAT(ctx, user.OrgID, id, user.ID); apierr != nil {
RespondError(w, apierr, nil)

View File

@@ -8,7 +8,6 @@ import (
basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
ossTypes "github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
@@ -40,7 +39,6 @@ type ModelDao interface {
UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError
GetPAT(ctx context.Context, pat string) (*types.GettablePAT, basemodel.BaseApiError)
GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError)
GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError)
ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError)
RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError
}

View File

@@ -43,7 +43,7 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (
}
user := &types.User{
ID: uuid.NewString(),
ID: uuid.New().String(),
Name: "",
Email: email,
Password: hash,

View File

@@ -196,27 +196,3 @@ func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID)
return &patWithUser, nil
}
// deprecated
func (m *modelDao) GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError) {
users := []ossTypes.GettableUser{}
if err := m.DB().NewSelect().
Model(&users).
Column("u.id", "u.name", "u.email", "u.password", "u.created_at", "u.profile_picture_url", "u.org_id", "u.group_id").
Join("JOIN personal_access_tokens p ON u.id = p.user_id").
Where("p.token = ?", token).
Where("p.expires_at >= strftime('%s', 'now')").
Where("p.org_id = ?", orgID).
Scan(ctx); err != nil {
return nil, model.InternalError(fmt.Errorf("failed to fetch user from PAT, err: %v", err))
}
if len(users) != 1 {
return nil, &model.ApiError{
Typ: model.ErrorInternal,
Err: fmt.Errorf("found zero or multiple users with same PAT token"),
}
}
return &users[0], nil
}

View File

@@ -17,13 +17,15 @@ var (
)
var (
Org = "org"
User = "user"
Org = "org"
User = "user"
CloudIntegration = "cloud_integration"
)
var (
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
)
type dialect struct {
@@ -211,6 +213,8 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
fkReferences = append(fkReferences, OrgReference)
} else if reference == User && !slices.Contains(fkReferences, UserReference) {
fkReferences = append(fkReferences, UserReference)
} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
fkReferences = append(fkReferences, CloudIntegrationReference)
}
}

6
go.mod
View File

@@ -10,7 +10,8 @@ require (
github.com/ClickHouse/clickhouse-go/v2 v2.30.0
github.com/DATA-DOG/go-sqlmock v1.5.2
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
github.com/SigNoz/signoz-otel-collector v0.111.16
github.com/SigNoz/signoz-otel-collector v0.111.39
github.com/antlr4-go/antlr/v4 v4.13.1
github.com/antonmedv/expr v1.15.3
github.com/cespare/xxhash/v2 v2.3.0
github.com/coreos/go-oidc/v3 v3.11.0
@@ -89,10 +90,9 @@ require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect
github.com/ClickHouse/ch-go v0.61.5 // indirect
github.com/ClickHouse/ch-go v0.63.1 // indirect
github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b // indirect
github.com/andybalholm/brotli v1.1.1 // indirect
github.com/antlr4-go/antlr/v4 v4.13.1 // indirect
github.com/armon/go-metrics v0.4.1 // indirect
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
github.com/aws/aws-sdk-go v1.55.5 // indirect

14
go.sum
View File

@@ -85,8 +85,8 @@ github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mx
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/ClickHouse/ch-go v0.61.5 h1:zwR8QbYI0tsMiEcze/uIMK+Tz1D3XZXLdNrlaOpeEI4=
github.com/ClickHouse/ch-go v0.61.5/go.mod h1:s1LJW/F/LcFs5HJnuogFMta50kKDO0lf9zzfrbl0RQg=
github.com/ClickHouse/ch-go v0.63.1 h1:s2JyZvWLTCSAGdtjMBBmAgQQHMco6pawLJMOXi0FODM=
github.com/ClickHouse/ch-go v0.63.1/go.mod h1:I1kJJCL3WJcBMGe1m+HVK0+nREaG+JOYYBWjrDrF3R0=
github.com/ClickHouse/clickhouse-go/v2 v2.30.0 h1:AG4D/hW39qa58+JHQIFOSnxyL46H6h2lrmGGk17dhFo=
github.com/ClickHouse/clickhouse-go/v2 v2.30.0/go.mod h1:i9ZQAojcayW3RsdCb3YR+n+wC2h65eJsZCscZ1Z1wyo=
github.com/Code-Hex/go-generics-cache v1.5.1 h1:6vhZGc5M7Y/YD8cIUcY8kcuQLB4cHR7U+0KMqAA0KcU=
@@ -100,8 +100,10 @@ github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkbj57eGXx8H3ZJ4zhmQXBnrW523ktj8=
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
github.com/SigNoz/signoz-otel-collector v0.111.16 h1:535uKH5Oux+35EsI+L3C6pnAP/Ye0PTCbVizXoL+VqE=
github.com/SigNoz/signoz-otel-collector v0.111.16/go.mod h1:HJ4m0LY1MPsuZmuRF7Ixb+bY8rxgRzI0VXzOedESsjg=
github.com/SigNoz/signoz-otel-collector v0.111.39-beta.1 h1:ZpSNrOZBOH2iCJIPeER5X0mfxOe64yP3JRX7FzBNfwY=
github.com/SigNoz/signoz-otel-collector v0.111.39-beta.1/go.mod h1:DCu/D+lqhsPNSGS4IMD+4gn7q06TGzOCKazSy+GURVc=
github.com/SigNoz/signoz-otel-collector v0.111.39 h1:Dl8QqZNAsj2atxP572OzsszPK0XPpd3LLPNPRAUJ5wo=
github.com/SigNoz/signoz-otel-collector v0.111.39/go.mod h1:DCu/D+lqhsPNSGS4IMD+4gn7q06TGzOCKazSy+GURVc=
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
@@ -820,8 +822,8 @@ github.com/prometheus/prometheus v0.300.1/go.mod h1:gtTPY/XVyCdqqnjA3NzDMb0/nc5H
github.com/puzpuzpuz/xsync/v3 v3.5.0 h1:i+cMcpEDY1BkNm7lPDkCtE4oElsYLn+EKF8kAu2vXT4=
github.com/puzpuzpuz/xsync/v3 v3.5.0/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA=
github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/redis/go-redis/v9 v9.6.1 h1:HHDteefn6ZkTtY5fGUE8tj8uy85AHk6zP7CpzIAM0y4=
github.com/redis/go-redis/v9 v9.6.1/go.mod h1:0C0c6ycQsdpVNQpxb1njEQIqkx5UcsM8FJCQLgE9+RA=
github.com/redis/go-redis/v9 v9.6.3 h1:8Dr5ygF1QFXRxIH/m3Xg9MMG1rS8YCtAgosrsewT6i0=
github.com/redis/go-redis/v9 v9.6.3/go.mod h1:0C0c6ycQsdpVNQpxb1njEQIqkx5UcsM8FJCQLgE9+RA=
github.com/rhnvrm/simples3 v0.6.1/go.mod h1:Y+3vYm2V7Y4VijFoJHHTrja6OgPrJ2cBti8dPGkC3sA=
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=

View File

@@ -104,7 +104,7 @@ fullText
* ...
*/
functionCall
: (HAS | HASANY | HASALL | HASNONE) LPAREN functionParamList RPAREN
: (HAS | HASANY | HASALL) LPAREN functionParamList RPAREN
;
// Function parameters can be keys, single scalar values, or arrays
@@ -182,7 +182,6 @@ OR : [Oo][Rr] ;
HAS : [Hh][Aa][Ss] ;
HASANY : [Hh][Aa][Ss][Aa][Nn][Yy] ;
HASALL : [Hh][Aa][Ss][Aa][Ll][Ll] ;
HASNONE : [Hh][Aa][Ss][Nn][Oo][Nn][Ee] ;
// Potential boolean constants
BOOL
@@ -205,7 +204,7 @@ QUOTED_TEXT
// Keys can have letters, digits, underscores, dots, and bracket pairs
// e.g. service.name, service.namespace, db.queries[].query_duration
KEY
: [a-zA-Z0-9_] [a-zA-Z0-9_.[\]]*
: [a-zA-Z0-9_] [a-zA-Z0-9_.*[\]]*
;
// Ignore whitespace
@@ -218,4 +217,4 @@ fragment DIGIT
: [0-9]
;
FREETEXT : (~[ \t\r\n=()'"<>![\]])+ ;
FREETEXT : (~[ \t\r\n=()'"<>!,[\]])+ ;
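Taken together, the grammar hunk above drops HASNONE from the function-call rule, lets KEY tokens contain `*`, and excludes `,` from FREETEXT. A few illustrative filter strings (assumptions about intended usage, not taken from the repository's tests) that these tokenization changes affect:

```go
package main

import "fmt"

func main() {
	examples := []string{
		`db.queries[*].duration > 100`,            // '*' is now allowed inside a KEY token
		`has(service.name, 'checkout')`,           // has/hasAny/hasAll remain the supported functions
		`service.name in ('frontend', 'backend')`, // ',' no longer gets swallowed into a FREETEXT token
	}
	for _, e := range examples {
		fmt.Println(e)
	}
}
```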

View File

@@ -25,6 +25,25 @@ type postableAlert struct {
Receivers []string `json:"receivers"`
}
func (pa *postableAlert) MarshalJSON() ([]byte, error) {
// Marshal the embedded PostableAlert to get its JSON representation.
alertJSON, err := json.Marshal(pa.PostableAlert)
if err != nil {
return nil, err
}
// Unmarshal that JSON into a map so we can add extra fields.
var m map[string]interface{}
if err := json.Unmarshal(alertJSON, &m); err != nil {
return nil, err
}
// Add the Receivers field.
m["receivers"] = pa.Receivers
return json.Marshal(m)
}
const (
alertsPath string = "/v1/alerts"
routesPath string = "/v1/routes"

View File

@@ -0,0 +1,35 @@
package legacyalertmanager
import (
"encoding/json"
"testing"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/prometheus/alertmanager/api/v2/models"
"github.com/stretchr/testify/assert"
)
func TestProvider_TestAlert(t *testing.T) {
pa := &postableAlert{
PostableAlert: &alertmanagertypes.PostableAlert{
Alert: models.Alert{
Labels: models.LabelSet{
"alertname": "test",
},
GeneratorURL: "http://localhost:9090/graph?g0.expr=up&g0.tab=1",
},
Annotations: models.LabelSet{
"summary": "test",
},
},
Receivers: []string{"receiver1", "receiver2"},
}
body, err := json.Marshal(pa)
if err != nil {
t.Fatalf("failed to marshal postable alert: %v", err)
}
assert.Contains(t, string(body), "receiver1")
assert.Contains(t, string(body), "receiver2")
}

File diff suppressed because one or more lines are too long

View File

@@ -25,13 +25,12 @@ OR=24
HAS=25
HASANY=26
HASALL=27
HASNONE=28
BOOL=29
NUMBER=30
QUOTED_TEXT=31
KEY=32
WS=33
FREETEXT=34
BOOL=28
NUMBER=29
QUOTED_TEXT=30
KEY=31
WS=32
FREETEXT=33
'('=1
')'=2
'['=3

File diff suppressed because one or more lines are too long

View File

@@ -25,13 +25,12 @@ OR=24
HAS=25
HASANY=26
HASALL=27
HASNONE=28
BOOL=29
NUMBER=30
QUOTED_TEXT=31
KEY=32
WS=33
FREETEXT=34
BOOL=28
NUMBER=29
QUOTED_TEXT=30
KEY=31
WS=32
FREETEXT=33
'('=1
')'=2
'['=3

View File

@@ -50,150 +50,146 @@ func filterquerylexerLexerInit() {
"", "LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
"HAS", "HASANY", "HASALL", "HASNONE", "BOOL", "NUMBER", "QUOTED_TEXT",
"KEY", "WS", "FREETEXT",
"HAS", "HASANY", "HASALL", "BOOL", "NUMBER", "QUOTED_TEXT", "KEY", "WS",
"FREETEXT",
}
staticData.RuleNames = []string{
"LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
"HAS", "HASANY", "HASALL", "HASNONE", "BOOL", "NUMBER", "QUOTED_TEXT",
"KEY", "WS", "DIGIT", "FREETEXT",
"HAS", "HASANY", "HASALL", "BOOL", "NUMBER", "QUOTED_TEXT", "KEY", "WS",
"DIGIT", "FREETEXT",
}
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 0, 34, 280, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
4, 0, 33, 270, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2,
10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15,
7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7,
20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25,
2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2,
31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 1, 0, 1, 0, 1, 1,
1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 85, 8,
5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1,
10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13,
1, 13, 1, 13, 1, 13, 4, 13, 112, 8, 13, 11, 13, 12, 13, 113, 1, 13, 1,
13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15,
1, 15, 1, 15, 1, 15, 4, 15, 131, 8, 15, 11, 15, 12, 15, 132, 1, 15, 1,
15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16,
1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 155, 8,
17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19,
1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 172, 8, 19, 1, 20, 1, 20, 1,
20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23,
1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1,
25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27,
1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1,
28, 1, 28, 1, 28, 1, 28, 1, 28, 3, 28, 223, 8, 28, 1, 29, 4, 29, 226, 8,
29, 11, 29, 12, 29, 227, 1, 29, 1, 29, 4, 29, 232, 8, 29, 11, 29, 12, 29,
233, 3, 29, 236, 8, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 242, 8, 30,
10, 30, 12, 30, 245, 9, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 252,
8, 30, 10, 30, 12, 30, 255, 9, 30, 1, 30, 3, 30, 258, 8, 30, 1, 31, 1,
31, 5, 31, 262, 8, 31, 10, 31, 12, 31, 265, 9, 31, 1, 32, 4, 32, 268, 8,
32, 11, 32, 12, 32, 269, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 4, 34, 277,
8, 34, 11, 34, 12, 34, 278, 0, 0, 35, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11,
31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1,
2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 83, 8, 5, 1, 6, 1, 6,
1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1,
11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13,
1, 13, 4, 13, 110, 8, 13, 11, 13, 12, 13, 111, 1, 13, 1, 13, 1, 13, 1,
13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15,
1, 15, 4, 15, 129, 8, 15, 11, 15, 12, 15, 130, 1, 15, 1, 15, 1, 15, 1,
15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16,
1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 153, 8, 17, 1, 18, 1,
18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19,
1, 19, 1, 19, 1, 19, 3, 19, 170, 8, 19, 1, 20, 1, 20, 1, 20, 1, 21, 1,
21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 24,
1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1,
26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27,
1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 3, 27, 213, 8, 27, 1, 28, 4, 28, 216,
8, 28, 11, 28, 12, 28, 217, 1, 28, 1, 28, 4, 28, 222, 8, 28, 11, 28, 12,
28, 223, 3, 28, 226, 8, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 232, 8,
29, 10, 29, 12, 29, 235, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29,
242, 8, 29, 10, 29, 12, 29, 245, 9, 29, 1, 29, 3, 29, 248, 8, 29, 1, 30,
1, 30, 5, 30, 252, 8, 30, 10, 30, 12, 30, 255, 9, 30, 1, 31, 4, 31, 258,
8, 31, 11, 31, 12, 31, 259, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 4, 33, 267,
8, 33, 11, 33, 12, 33, 268, 0, 0, 34, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11,
6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15,
31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24,
49, 25, 51, 26, 53, 27, 55, 28, 57, 29, 59, 30, 61, 31, 63, 32, 65, 33,
67, 0, 69, 34, 1, 0, 29, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105,
2, 0, 75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 78, 78, 110, 110,
2, 0, 79, 79, 111, 111, 2, 0, 84, 84, 116, 116, 2, 0, 9, 9, 32, 32, 2,
0, 66, 66, 98, 98, 2, 0, 87, 87, 119, 119, 2, 0, 88, 88, 120, 120, 2, 0,
83, 83, 115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0,
80, 80, 112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 65, 65, 97, 97, 2, 0, 68,
68, 100, 100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85,
85, 117, 117, 2, 0, 70, 70, 102, 102, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39,
92, 92, 4, 0, 48, 57, 65, 90, 95, 95, 97, 122, 6, 0, 46, 46, 48, 57, 65,
49, 25, 51, 26, 53, 27, 55, 28, 57, 29, 59, 30, 61, 31, 63, 32, 65, 0,
67, 33, 1, 0, 29, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105, 2, 0,
75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 78, 78, 110, 110, 2, 0,
79, 79, 111, 111, 2, 0, 84, 84, 116, 116, 2, 0, 9, 9, 32, 32, 2, 0, 66,
66, 98, 98, 2, 0, 87, 87, 119, 119, 2, 0, 88, 88, 120, 120, 2, 0, 83, 83,
115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0, 80, 80,
112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100,
100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85, 85, 117,
117, 2, 0, 70, 70, 102, 102, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92, 92,
4, 0, 48, 57, 65, 90, 95, 95, 97, 122, 7, 0, 42, 42, 46, 46, 48, 57, 65,
91, 93, 93, 95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57,
7, 0, 9, 10, 13, 13, 32, 34, 39, 41, 60, 62, 91, 91, 93, 93, 295, 0, 1,
1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9,
1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0,
17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0,
0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0,
0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0,
0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1,
0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55,
1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0,
63, 1, 0, 0, 0, 0, 65, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 1, 71, 1, 0, 0, 0,
3, 73, 1, 0, 0, 0, 5, 75, 1, 0, 0, 0, 7, 77, 1, 0, 0, 0, 9, 79, 1, 0, 0,
0, 11, 84, 1, 0, 0, 0, 13, 86, 1, 0, 0, 0, 15, 89, 1, 0, 0, 0, 17, 92,
1, 0, 0, 0, 19, 94, 1, 0, 0, 0, 21, 97, 1, 0, 0, 0, 23, 99, 1, 0, 0, 0,
25, 102, 1, 0, 0, 0, 27, 107, 1, 0, 0, 0, 29, 120, 1, 0, 0, 0, 31, 126,
1, 0, 0, 0, 33, 140, 1, 0, 0, 0, 35, 148, 1, 0, 0, 0, 37, 156, 1, 0, 0,
0, 39, 163, 1, 0, 0, 0, 41, 173, 1, 0, 0, 0, 43, 176, 1, 0, 0, 0, 45, 180,
1, 0, 0, 0, 47, 184, 1, 0, 0, 0, 49, 187, 1, 0, 0, 0, 51, 191, 1, 0, 0,
0, 53, 198, 1, 0, 0, 0, 55, 205, 1, 0, 0, 0, 57, 222, 1, 0, 0, 0, 59, 225,
1, 0, 0, 0, 61, 257, 1, 0, 0, 0, 63, 259, 1, 0, 0, 0, 65, 267, 1, 0, 0,
0, 67, 273, 1, 0, 0, 0, 69, 276, 1, 0, 0, 0, 71, 72, 5, 40, 0, 0, 72, 2,
1, 0, 0, 0, 73, 74, 5, 41, 0, 0, 74, 4, 1, 0, 0, 0, 75, 76, 5, 91, 0, 0,
76, 6, 1, 0, 0, 0, 77, 78, 5, 93, 0, 0, 78, 8, 1, 0, 0, 0, 79, 80, 5, 44,
0, 0, 80, 10, 1, 0, 0, 0, 81, 85, 5, 61, 0, 0, 82, 83, 5, 61, 0, 0, 83,
85, 5, 61, 0, 0, 84, 81, 1, 0, 0, 0, 84, 82, 1, 0, 0, 0, 85, 12, 1, 0,
0, 0, 86, 87, 5, 33, 0, 0, 87, 88, 5, 61, 0, 0, 88, 14, 1, 0, 0, 0, 89,
90, 5, 60, 0, 0, 90, 91, 5, 62, 0, 0, 91, 16, 1, 0, 0, 0, 92, 93, 5, 60,
0, 0, 93, 18, 1, 0, 0, 0, 94, 95, 5, 60, 0, 0, 95, 96, 5, 61, 0, 0, 96,
20, 1, 0, 0, 0, 97, 98, 5, 62, 0, 0, 98, 22, 1, 0, 0, 0, 99, 100, 5, 62,
0, 0, 100, 101, 5, 61, 0, 0, 101, 24, 1, 0, 0, 0, 102, 103, 7, 0, 0, 0,
103, 104, 7, 1, 0, 0, 104, 105, 7, 2, 0, 0, 105, 106, 7, 3, 0, 0, 106,
26, 1, 0, 0, 0, 107, 108, 7, 4, 0, 0, 108, 109, 7, 5, 0, 0, 109, 111, 7,
6, 0, 0, 110, 112, 7, 7, 0, 0, 111, 110, 1, 0, 0, 0, 112, 113, 1, 0, 0,
0, 113, 111, 1, 0, 0, 0, 113, 114, 1, 0, 0, 0, 114, 115, 1, 0, 0, 0, 115,
116, 7, 0, 0, 0, 116, 117, 7, 1, 0, 0, 117, 118, 7, 2, 0, 0, 118, 119,
7, 3, 0, 0, 119, 28, 1, 0, 0, 0, 120, 121, 7, 1, 0, 0, 121, 122, 7, 0,
0, 0, 122, 123, 7, 1, 0, 0, 123, 124, 7, 2, 0, 0, 124, 125, 7, 3, 0, 0,
125, 30, 1, 0, 0, 0, 126, 127, 7, 4, 0, 0, 127, 128, 7, 5, 0, 0, 128, 130,
7, 6, 0, 0, 129, 131, 7, 7, 0, 0, 130, 129, 1, 0, 0, 0, 131, 132, 1, 0,
0, 0, 132, 130, 1, 0, 0, 0, 132, 133, 1, 0, 0, 0, 133, 134, 1, 0, 0, 0,
134, 135, 7, 1, 0, 0, 135, 136, 7, 0, 0, 0, 136, 137, 7, 1, 0, 0, 137,
138, 7, 2, 0, 0, 138, 139, 7, 3, 0, 0, 139, 32, 1, 0, 0, 0, 140, 141, 7,
8, 0, 0, 141, 142, 7, 3, 0, 0, 142, 143, 7, 6, 0, 0, 143, 144, 7, 9, 0,
0, 144, 145, 7, 3, 0, 0, 145, 146, 7, 3, 0, 0, 146, 147, 7, 4, 0, 0, 147,
34, 1, 0, 0, 0, 148, 149, 7, 3, 0, 0, 149, 150, 7, 10, 0, 0, 150, 151,
7, 1, 0, 0, 151, 152, 7, 11, 0, 0, 152, 154, 7, 6, 0, 0, 153, 155, 7, 11,
0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 36, 1, 0, 0, 0,
156, 157, 7, 12, 0, 0, 157, 158, 7, 3, 0, 0, 158, 159, 7, 13, 0, 0, 159,
160, 7, 3, 0, 0, 160, 161, 7, 10, 0, 0, 161, 162, 7, 14, 0, 0, 162, 38,
1, 0, 0, 0, 163, 164, 7, 15, 0, 0, 164, 165, 7, 5, 0, 0, 165, 166, 7, 4,
0, 0, 166, 167, 7, 6, 0, 0, 167, 168, 7, 16, 0, 0, 168, 169, 7, 1, 0, 0,
169, 171, 7, 4, 0, 0, 170, 172, 7, 11, 0, 0, 171, 170, 1, 0, 0, 0, 171,
172, 1, 0, 0, 0, 172, 40, 1, 0, 0, 0, 173, 174, 7, 1, 0, 0, 174, 175, 7,
4, 0, 0, 175, 42, 1, 0, 0, 0, 176, 177, 7, 4, 0, 0, 177, 178, 7, 5, 0,
0, 178, 179, 7, 6, 0, 0, 179, 44, 1, 0, 0, 0, 180, 181, 7, 16, 0, 0, 181,
182, 7, 4, 0, 0, 182, 183, 7, 17, 0, 0, 183, 46, 1, 0, 0, 0, 184, 185,
7, 5, 0, 0, 185, 186, 7, 12, 0, 0, 186, 48, 1, 0, 0, 0, 187, 188, 7, 18,
0, 0, 188, 189, 7, 16, 0, 0, 189, 190, 7, 11, 0, 0, 190, 50, 1, 0, 0, 0,
191, 192, 7, 18, 0, 0, 192, 193, 7, 16, 0, 0, 193, 194, 7, 11, 0, 0, 194,
195, 7, 16, 0, 0, 195, 196, 7, 4, 0, 0, 196, 197, 7, 19, 0, 0, 197, 52,
1, 0, 0, 0, 198, 199, 7, 18, 0, 0, 199, 200, 7, 16, 0, 0, 200, 201, 7,
11, 0, 0, 201, 202, 7, 16, 0, 0, 202, 203, 7, 0, 0, 0, 203, 204, 7, 0,
0, 0, 204, 54, 1, 0, 0, 0, 205, 206, 7, 18, 0, 0, 206, 207, 7, 16, 0, 0,
207, 208, 7, 11, 0, 0, 208, 209, 7, 4, 0, 0, 209, 210, 7, 5, 0, 0, 210,
211, 7, 4, 0, 0, 211, 212, 7, 3, 0, 0, 212, 56, 1, 0, 0, 0, 213, 214, 7,
6, 0, 0, 214, 215, 7, 12, 0, 0, 215, 216, 7, 20, 0, 0, 216, 223, 7, 3,
0, 0, 217, 218, 7, 21, 0, 0, 218, 219, 7, 16, 0, 0, 219, 220, 7, 0, 0,
0, 220, 221, 7, 11, 0, 0, 221, 223, 7, 3, 0, 0, 222, 213, 1, 0, 0, 0, 222,
217, 1, 0, 0, 0, 223, 58, 1, 0, 0, 0, 224, 226, 3, 67, 33, 0, 225, 224,
1, 0, 0, 0, 226, 227, 1, 0, 0, 0, 227, 225, 1, 0, 0, 0, 227, 228, 1, 0,
0, 0, 228, 235, 1, 0, 0, 0, 229, 231, 5, 46, 0, 0, 230, 232, 3, 67, 33,
0, 231, 230, 1, 0, 0, 0, 232, 233, 1, 0, 0, 0, 233, 231, 1, 0, 0, 0, 233,
234, 1, 0, 0, 0, 234, 236, 1, 0, 0, 0, 235, 229, 1, 0, 0, 0, 235, 236,
1, 0, 0, 0, 236, 60, 1, 0, 0, 0, 237, 243, 5, 34, 0, 0, 238, 242, 8, 22,
0, 0, 239, 240, 5, 92, 0, 0, 240, 242, 9, 0, 0, 0, 241, 238, 1, 0, 0, 0,
241, 239, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243,
244, 1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 258,
5, 34, 0, 0, 247, 253, 5, 39, 0, 0, 248, 252, 8, 23, 0, 0, 249, 250, 5,
92, 0, 0, 250, 252, 9, 0, 0, 0, 251, 248, 1, 0, 0, 0, 251, 249, 1, 0, 0,
0, 252, 255, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 253, 254, 1, 0, 0, 0, 254,
256, 1, 0, 0, 0, 255, 253, 1, 0, 0, 0, 256, 258, 5, 39, 0, 0, 257, 237,
1, 0, 0, 0, 257, 247, 1, 0, 0, 0, 258, 62, 1, 0, 0, 0, 259, 263, 7, 24,
0, 0, 260, 262, 7, 25, 0, 0, 261, 260, 1, 0, 0, 0, 262, 265, 1, 0, 0, 0,
263, 261, 1, 0, 0, 0, 263, 264, 1, 0, 0, 0, 264, 64, 1, 0, 0, 0, 265, 263,
1, 0, 0, 0, 266, 268, 7, 26, 0, 0, 267, 266, 1, 0, 0, 0, 268, 269, 1, 0,
0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0,
271, 272, 6, 32, 0, 0, 272, 66, 1, 0, 0, 0, 273, 274, 7, 27, 0, 0, 274,
68, 1, 0, 0, 0, 275, 277, 8, 28, 0, 0, 276, 275, 1, 0, 0, 0, 277, 278,
1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 70, 1, 0,
0, 0, 18, 0, 84, 113, 132, 154, 171, 222, 227, 233, 235, 241, 243, 251,
253, 257, 263, 269, 278, 1, 6, 0, 0,
8, 0, 9, 10, 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 285,
0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0,
0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0,
0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0,
0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1,
0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39,
1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0,
47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0,
0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0,
0, 0, 63, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 1, 69, 1, 0, 0, 0, 3, 71, 1, 0,
0, 0, 5, 73, 1, 0, 0, 0, 7, 75, 1, 0, 0, 0, 9, 77, 1, 0, 0, 0, 11, 82,
1, 0, 0, 0, 13, 84, 1, 0, 0, 0, 15, 87, 1, 0, 0, 0, 17, 90, 1, 0, 0, 0,
19, 92, 1, 0, 0, 0, 21, 95, 1, 0, 0, 0, 23, 97, 1, 0, 0, 0, 25, 100, 1,
0, 0, 0, 27, 105, 1, 0, 0, 0, 29, 118, 1, 0, 0, 0, 31, 124, 1, 0, 0, 0,
33, 138, 1, 0, 0, 0, 35, 146, 1, 0, 0, 0, 37, 154, 1, 0, 0, 0, 39, 161,
1, 0, 0, 0, 41, 171, 1, 0, 0, 0, 43, 174, 1, 0, 0, 0, 45, 178, 1, 0, 0,
0, 47, 182, 1, 0, 0, 0, 49, 185, 1, 0, 0, 0, 51, 189, 1, 0, 0, 0, 53, 196,
1, 0, 0, 0, 55, 212, 1, 0, 0, 0, 57, 215, 1, 0, 0, 0, 59, 247, 1, 0, 0,
0, 61, 249, 1, 0, 0, 0, 63, 257, 1, 0, 0, 0, 65, 263, 1, 0, 0, 0, 67, 266,
1, 0, 0, 0, 69, 70, 5, 40, 0, 0, 70, 2, 1, 0, 0, 0, 71, 72, 5, 41, 0, 0,
72, 4, 1, 0, 0, 0, 73, 74, 5, 91, 0, 0, 74, 6, 1, 0, 0, 0, 75, 76, 5, 93,
0, 0, 76, 8, 1, 0, 0, 0, 77, 78, 5, 44, 0, 0, 78, 10, 1, 0, 0, 0, 79, 83,
5, 61, 0, 0, 80, 81, 5, 61, 0, 0, 81, 83, 5, 61, 0, 0, 82, 79, 1, 0, 0,
0, 82, 80, 1, 0, 0, 0, 83, 12, 1, 0, 0, 0, 84, 85, 5, 33, 0, 0, 85, 86,
5, 61, 0, 0, 86, 14, 1, 0, 0, 0, 87, 88, 5, 60, 0, 0, 88, 89, 5, 62, 0,
0, 89, 16, 1, 0, 0, 0, 90, 91, 5, 60, 0, 0, 91, 18, 1, 0, 0, 0, 92, 93,
5, 60, 0, 0, 93, 94, 5, 61, 0, 0, 94, 20, 1, 0, 0, 0, 95, 96, 5, 62, 0,
0, 96, 22, 1, 0, 0, 0, 97, 98, 5, 62, 0, 0, 98, 99, 5, 61, 0, 0, 99, 24,
1, 0, 0, 0, 100, 101, 7, 0, 0, 0, 101, 102, 7, 1, 0, 0, 102, 103, 7, 2,
0, 0, 103, 104, 7, 3, 0, 0, 104, 26, 1, 0, 0, 0, 105, 106, 7, 4, 0, 0,
106, 107, 7, 5, 0, 0, 107, 109, 7, 6, 0, 0, 108, 110, 7, 7, 0, 0, 109,
108, 1, 0, 0, 0, 110, 111, 1, 0, 0, 0, 111, 109, 1, 0, 0, 0, 111, 112,
1, 0, 0, 0, 112, 113, 1, 0, 0, 0, 113, 114, 7, 0, 0, 0, 114, 115, 7, 1,
0, 0, 115, 116, 7, 2, 0, 0, 116, 117, 7, 3, 0, 0, 117, 28, 1, 0, 0, 0,
118, 119, 7, 1, 0, 0, 119, 120, 7, 0, 0, 0, 120, 121, 7, 1, 0, 0, 121,
122, 7, 2, 0, 0, 122, 123, 7, 3, 0, 0, 123, 30, 1, 0, 0, 0, 124, 125, 7,
4, 0, 0, 125, 126, 7, 5, 0, 0, 126, 128, 7, 6, 0, 0, 127, 129, 7, 7, 0,
0, 128, 127, 1, 0, 0, 0, 129, 130, 1, 0, 0, 0, 130, 128, 1, 0, 0, 0, 130,
131, 1, 0, 0, 0, 131, 132, 1, 0, 0, 0, 132, 133, 7, 1, 0, 0, 133, 134,
7, 0, 0, 0, 134, 135, 7, 1, 0, 0, 135, 136, 7, 2, 0, 0, 136, 137, 7, 3,
0, 0, 137, 32, 1, 0, 0, 0, 138, 139, 7, 8, 0, 0, 139, 140, 7, 3, 0, 0,
140, 141, 7, 6, 0, 0, 141, 142, 7, 9, 0, 0, 142, 143, 7, 3, 0, 0, 143,
144, 7, 3, 0, 0, 144, 145, 7, 4, 0, 0, 145, 34, 1, 0, 0, 0, 146, 147, 7,
3, 0, 0, 147, 148, 7, 10, 0, 0, 148, 149, 7, 1, 0, 0, 149, 150, 7, 11,
0, 0, 150, 152, 7, 6, 0, 0, 151, 153, 7, 11, 0, 0, 152, 151, 1, 0, 0, 0,
152, 153, 1, 0, 0, 0, 153, 36, 1, 0, 0, 0, 154, 155, 7, 12, 0, 0, 155,
156, 7, 3, 0, 0, 156, 157, 7, 13, 0, 0, 157, 158, 7, 3, 0, 0, 158, 159,
7, 10, 0, 0, 159, 160, 7, 14, 0, 0, 160, 38, 1, 0, 0, 0, 161, 162, 7, 15,
0, 0, 162, 163, 7, 5, 0, 0, 163, 164, 7, 4, 0, 0, 164, 165, 7, 6, 0, 0,
165, 166, 7, 16, 0, 0, 166, 167, 7, 1, 0, 0, 167, 169, 7, 4, 0, 0, 168,
170, 7, 11, 0, 0, 169, 168, 1, 0, 0, 0, 169, 170, 1, 0, 0, 0, 170, 40,
1, 0, 0, 0, 171, 172, 7, 1, 0, 0, 172, 173, 7, 4, 0, 0, 173, 42, 1, 0,
0, 0, 174, 175, 7, 4, 0, 0, 175, 176, 7, 5, 0, 0, 176, 177, 7, 6, 0, 0,
177, 44, 1, 0, 0, 0, 178, 179, 7, 16, 0, 0, 179, 180, 7, 4, 0, 0, 180,
181, 7, 17, 0, 0, 181, 46, 1, 0, 0, 0, 182, 183, 7, 5, 0, 0, 183, 184,
7, 12, 0, 0, 184, 48, 1, 0, 0, 0, 185, 186, 7, 18, 0, 0, 186, 187, 7, 16,
0, 0, 187, 188, 7, 11, 0, 0, 188, 50, 1, 0, 0, 0, 189, 190, 7, 18, 0, 0,
190, 191, 7, 16, 0, 0, 191, 192, 7, 11, 0, 0, 192, 193, 7, 16, 0, 0, 193,
194, 7, 4, 0, 0, 194, 195, 7, 19, 0, 0, 195, 52, 1, 0, 0, 0, 196, 197,
7, 18, 0, 0, 197, 198, 7, 16, 0, 0, 198, 199, 7, 11, 0, 0, 199, 200, 7,
16, 0, 0, 200, 201, 7, 0, 0, 0, 201, 202, 7, 0, 0, 0, 202, 54, 1, 0, 0,
0, 203, 204, 7, 6, 0, 0, 204, 205, 7, 12, 0, 0, 205, 206, 7, 20, 0, 0,
206, 213, 7, 3, 0, 0, 207, 208, 7, 21, 0, 0, 208, 209, 7, 16, 0, 0, 209,
210, 7, 0, 0, 0, 210, 211, 7, 11, 0, 0, 211, 213, 7, 3, 0, 0, 212, 203,
1, 0, 0, 0, 212, 207, 1, 0, 0, 0, 213, 56, 1, 0, 0, 0, 214, 216, 3, 65,
32, 0, 215, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 215, 1, 0, 0, 0,
217, 218, 1, 0, 0, 0, 218, 225, 1, 0, 0, 0, 219, 221, 5, 46, 0, 0, 220,
222, 3, 65, 32, 0, 221, 220, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 221,
1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 226, 1, 0, 0, 0, 225, 219, 1, 0,
0, 0, 225, 226, 1, 0, 0, 0, 226, 58, 1, 0, 0, 0, 227, 233, 5, 34, 0, 0,
228, 232, 8, 22, 0, 0, 229, 230, 5, 92, 0, 0, 230, 232, 9, 0, 0, 0, 231,
228, 1, 0, 0, 0, 231, 229, 1, 0, 0, 0, 232, 235, 1, 0, 0, 0, 233, 231,
1, 0, 0, 0, 233, 234, 1, 0, 0, 0, 234, 236, 1, 0, 0, 0, 235, 233, 1, 0,
0, 0, 236, 248, 5, 34, 0, 0, 237, 243, 5, 39, 0, 0, 238, 242, 8, 23, 0,
0, 239, 240, 5, 92, 0, 0, 240, 242, 9, 0, 0, 0, 241, 238, 1, 0, 0, 0, 241,
239, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243, 244,
1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 248, 5, 39,
0, 0, 247, 227, 1, 0, 0, 0, 247, 237, 1, 0, 0, 0, 248, 60, 1, 0, 0, 0,
249, 253, 7, 24, 0, 0, 250, 252, 7, 25, 0, 0, 251, 250, 1, 0, 0, 0, 252,
255, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 253, 254, 1, 0, 0, 0, 254, 62, 1,
0, 0, 0, 255, 253, 1, 0, 0, 0, 256, 258, 7, 26, 0, 0, 257, 256, 1, 0, 0,
0, 258, 259, 1, 0, 0, 0, 259, 257, 1, 0, 0, 0, 259, 260, 1, 0, 0, 0, 260,
261, 1, 0, 0, 0, 261, 262, 6, 31, 0, 0, 262, 64, 1, 0, 0, 0, 263, 264,
7, 27, 0, 0, 264, 66, 1, 0, 0, 0, 265, 267, 8, 28, 0, 0, 266, 265, 1, 0,
0, 0, 267, 268, 1, 0, 0, 0, 268, 266, 1, 0, 0, 0, 268, 269, 1, 0, 0, 0,
269, 68, 1, 0, 0, 0, 18, 0, 82, 111, 130, 152, 169, 212, 217, 223, 225,
231, 233, 241, 243, 247, 253, 259, 268, 1, 6, 0, 0,
}
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
@@ -261,11 +257,10 @@ const (
FilterQueryLexerHAS = 25
FilterQueryLexerHASANY = 26
FilterQueryLexerHASALL = 27
FilterQueryLexerHASNONE = 28
FilterQueryLexerBOOL = 29
FilterQueryLexerNUMBER = 30
FilterQueryLexerQUOTED_TEXT = 31
FilterQueryLexerKEY = 32
FilterQueryLexerWS = 33
FilterQueryLexerFREETEXT = 34
FilterQueryLexerBOOL = 28
FilterQueryLexerNUMBER = 29
FilterQueryLexerQUOTED_TEXT = 30
FilterQueryLexerKEY = 31
FilterQueryLexerWS = 32
FilterQueryLexerFREETEXT = 33
)

View File

@@ -40,8 +40,8 @@ func filterqueryParserInit() {
"", "LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
"HAS", "HASANY", "HASALL", "HASNONE", "BOOL", "NUMBER", "QUOTED_TEXT",
"KEY", "WS", "FREETEXT",
"HAS", "HASANY", "HASALL", "BOOL", "NUMBER", "QUOTED_TEXT", "KEY", "WS",
"FREETEXT",
}
staticData.RuleNames = []string{
"query", "expression", "orExpression", "andExpression", "unaryExpression",
@@ -51,7 +51,7 @@ func filterqueryParserInit() {
}
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 1, 34, 212, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7,
4, 1, 33, 212, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7,
4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7,
10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15,
2, 16, 7, 16, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 5, 2, 43,
@@ -72,7 +72,7 @@ func filterqueryParserInit() {
13, 1, 13, 1, 13, 3, 13, 202, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15,
1, 15, 1, 16, 1, 16, 1, 16, 0, 0, 17, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18,
20, 22, 24, 26, 28, 30, 32, 0, 6, 1, 0, 7, 8, 2, 0, 13, 13, 15, 15, 2,
0, 14, 14, 16, 16, 2, 0, 31, 31, 34, 34, 1, 0, 25, 28, 1, 0, 29, 32, 225,
0, 14, 14, 16, 16, 2, 0, 30, 30, 33, 33, 1, 0, 25, 27, 1, 0, 28, 31, 225,
0, 34, 1, 0, 0, 0, 2, 37, 1, 0, 0, 0, 4, 39, 1, 0, 0, 0, 6, 47, 1, 0, 0,
0, 8, 57, 1, 0, 0, 0, 10, 69, 1, 0, 0, 0, 12, 147, 1, 0, 0, 0, 14, 159,
1, 0, 0, 0, 16, 173, 1, 0, 0, 0, 18, 175, 1, 0, 0, 0, 20, 183, 1, 0, 0,
@@ -140,7 +140,7 @@ func filterqueryParserInit() {
201, 198, 1, 0, 0, 0, 201, 199, 1, 0, 0, 0, 201, 200, 1, 0, 0, 0, 202,
27, 1, 0, 0, 0, 203, 204, 5, 3, 0, 0, 204, 205, 3, 18, 9, 0, 205, 206,
5, 4, 0, 0, 206, 29, 1, 0, 0, 0, 207, 208, 7, 5, 0, 0, 208, 31, 1, 0, 0,
0, 209, 210, 5, 32, 0, 0, 210, 33, 1, 0, 0, 0, 11, 44, 51, 53, 57, 69,
0, 209, 210, 5, 31, 0, 0, 210, 33, 1, 0, 0, 0, 11, 44, 51, 53, 57, 69,
147, 159, 173, 180, 195, 201,
}
deserializer := antlr.NewATNDeserializer(nil)
@@ -207,13 +207,12 @@ const (
FilterQueryParserHAS = 25
FilterQueryParserHASANY = 26
FilterQueryParserHASALL = 27
FilterQueryParserHASNONE = 28
FilterQueryParserBOOL = 29
FilterQueryParserNUMBER = 30
FilterQueryParserQUOTED_TEXT = 31
FilterQueryParserKEY = 32
FilterQueryParserWS = 33
FilterQueryParserFREETEXT = 34
FilterQueryParserBOOL = 28
FilterQueryParserNUMBER = 29
FilterQueryParserQUOTED_TEXT = 30
FilterQueryParserKEY = 31
FilterQueryParserWS = 32
FilterQueryParserFREETEXT = 33
)
// FilterQueryParser rules.
@@ -803,7 +802,7 @@ func (p *FilterQueryParser) AndExpression() (localctx IAndExpressionContext) {
}
_la = p.GetTokenStream().LA(1)
for (int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&24138219522) != 0 {
for (int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&12058624002) != 0 {
p.SetState(51)
p.GetErrorHandler().Sync(p)
if p.HasError() {
@@ -825,7 +824,7 @@ func (p *FilterQueryParser) AndExpression() (localctx IAndExpressionContext) {
p.UnaryExpression()
}
case FilterQueryParserLPAREN, FilterQueryParserNOT, FilterQueryParserHAS, FilterQueryParserHASANY, FilterQueryParserHASALL, FilterQueryParserHASNONE, FilterQueryParserQUOTED_TEXT, FilterQueryParserKEY, FilterQueryParserFREETEXT:
case FilterQueryParserLPAREN, FilterQueryParserNOT, FilterQueryParserHAS, FilterQueryParserHASANY, FilterQueryParserHASALL, FilterQueryParserQUOTED_TEXT, FilterQueryParserKEY, FilterQueryParserFREETEXT:
{
p.SetState(50)
p.UnaryExpression()
@@ -2653,7 +2652,6 @@ type IFunctionCallContext interface {
HAS() antlr.TerminalNode
HASANY() antlr.TerminalNode
HASALL() antlr.TerminalNode
HASNONE() antlr.TerminalNode
// IsFunctionCallContext differentiates from other interfaces.
IsFunctionCallContext()
@@ -2727,10 +2725,6 @@ func (s *FunctionCallContext) HASALL() antlr.TerminalNode {
return s.GetToken(FilterQueryParserHASALL, 0)
}
func (s *FunctionCallContext) HASNONE() antlr.TerminalNode {
return s.GetToken(FilterQueryParserHASNONE, 0)
}
func (s *FunctionCallContext) GetRuleContext() antlr.RuleContext {
return s
}
@@ -2771,7 +2765,7 @@ func (p *FilterQueryParser) FunctionCall() (localctx IFunctionCallContext) {
p.SetState(185)
_la = p.GetTokenStream().LA(1)
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&503316480) != 0) {
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&234881024) != 0) {
p.GetErrorHandler().RecoverInline(p)
} else {
p.GetErrorHandler().ReportMatch(p)
@@ -3411,7 +3405,7 @@ func (p *FilterQueryParser) Value() (localctx IValueContext) {
p.SetState(207)
_la = p.GetTokenStream().LA(1)
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&8053063680) != 0) {
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&4026531840) != 0) {
p.GetErrorHandler().RecoverInline(p)
} else {
p.GetErrorHandler().ReportMatch(p)


@@ -0,0 +1,48 @@
package parser
import (
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/antlr4-go/antlr/v4"
)
// QueryStringToKeysSelectors converts a query string to a list of field key selectors
//
// e.g. "service.name="query-service" AND http.status_code=200 AND resource.k8s.namespace.name="application"" -> []*telemetrytypes.FieldKeySelector{
// {
// Name: "service.name",
// FieldContext: telemetrytypes.FieldContextUnspecified,
// FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
// },
// {
// Name: "http.status_code",
// FieldContext: telemetrytypes.FieldContextUnspecified,
// FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
// },
// {
// Name: "resource.k8s.namespace.name",
// FieldContext: telemetrytypes.FieldContextResource,
// FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
// },
// }
func QueryStringToKeysSelectors(query string) ([]*telemetrytypes.FieldKeySelector, error) {
lexer := NewFilterQueryLexer(antlr.NewInputStream(query))
keys := []*telemetrytypes.FieldKeySelector{}
for {
tok := lexer.NextToken()
if tok.GetTokenType() == antlr.TokenEOF {
break
}
if tok.GetTokenType() == FilterQueryLexerKEY {
key := telemetrytypes.GetFieldKeyFromKeyText(tok.GetText())
keys = append(keys, &telemetrytypes.FieldKeySelector{
Name: key.Name,
Signal: key.Signal,
FieldContext: key.FieldContext,
FieldDataType: key.FieldDataType,
})
}
}
return keys, nil
}
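A minimal usage sketch for the helper above (the function name and the printed format are hypothetical; only identifiers visible in this diff are assumed):

package parser

import "fmt"

// ExampleQueryStringToKeysSelectors sketches how the helper above can be called.
func ExampleQueryStringToKeysSelectors() {
	selectors, err := QueryStringToKeysSelectors(`service.name="redis" AND http.status_code=200`)
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	for _, sel := range selectors {
		// FieldContext and FieldDataType stay Unspecified unless the key text
		// carries a context prefix such as resource.
		fmt.Printf("%s context=%v type=%v\n", sel.Name, sel.FieldContext, sel.FieldDataType)
	}
}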


@@ -0,0 +1,101 @@
package parser
import (
"testing"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
func TestQueryToKeys(t *testing.T) {
testCases := []struct {
query string
expectedKeys []telemetrytypes.FieldKeySelector
}{
{
query: `service.name="redis"`,
expectedKeys: []telemetrytypes.FieldKeySelector{
{
Name: "service.name",
Signal: telemetrytypes.SignalUnspecified,
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
},
{
query: `resource.service.name="redis"`,
expectedKeys: []telemetrytypes.FieldKeySelector{
{
Name: "service.name",
Signal: telemetrytypes.SignalUnspecified,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
},
{
query: `service.name="redis" AND http.status_code=200`,
expectedKeys: []telemetrytypes.FieldKeySelector{
{
Name: "service.name",
Signal: telemetrytypes.SignalUnspecified,
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
{
Name: "http.status_code",
Signal: telemetrytypes.SignalUnspecified,
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
},
{
query: `has(payload.user_ids, 123)`,
expectedKeys: []telemetrytypes.FieldKeySelector{
{
Name: "payload.user_ids",
Signal: telemetrytypes.SignalUnspecified,
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
},
{
query: `body.user_ids[*] = 123`,
expectedKeys: []telemetrytypes.FieldKeySelector{
{
Name: "body.user_ids[*]",
Signal: telemetrytypes.SignalUnspecified,
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
},
}
for _, testCase := range testCases {
keys, err := QueryStringToKeysSelectors(testCase.query)
if err != nil {
t.Fatalf("Error: %v", err)
}
if len(keys) != len(testCase.expectedKeys) {
t.Fatalf("Expected %d keys, got %d", len(testCase.expectedKeys), len(keys))
}
for i, key := range keys {
if key.Name != testCase.expectedKeys[i].Name {
t.Fatalf("Expected key %v, got %v", testCase.expectedKeys[i], key)
}
if key.Signal != testCase.expectedKeys[i].Signal {
t.Fatalf("Expected signal %v, got %v", testCase.expectedKeys[i].Signal, key.Signal)
}
if key.FieldContext != testCase.expectedKeys[i].FieldContext {
t.Fatalf("Expected field context %v, got %v", testCase.expectedKeys[i].FieldContext, key.FieldContext)
}
if key.FieldDataType != testCase.expectedKeys[i].FieldDataType {
t.Fatalf("Expected field data type %v, got %v", testCase.expectedKeys[i].FieldDataType, key.FieldDataType)
}
}
}
}


@@ -0,0 +1,587 @@
package parser
import (
"context"
"fmt"
"strconv"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/antlr4-go/antlr/v4"
sqlbuilder "github.com/huandu/go-sqlbuilder"
)
// WhereClauseVisitor implements the FilterQueryVisitor interface
// to convert the parsed filter expressions into a ClickHouse WHERE clause
type WhereClauseVisitor struct {
conditionBuilder qbtypes.ConditionBuilder
warnings []error
fieldKeys map[string][]telemetrytypes.TelemetryFieldKey
errors []error
builder *sqlbuilder.SelectBuilder
fullTextColumn telemetrytypes.TelemetryFieldKey
}
// NewWhereClauseVisitor creates a new WhereClauseVisitor
func NewWhereClauseVisitor(
conditionBuilder qbtypes.ConditionBuilder,
fieldKeys map[string][]telemetrytypes.TelemetryFieldKey,
builder *sqlbuilder.SelectBuilder,
fullTextColumn telemetrytypes.TelemetryFieldKey,
) *WhereClauseVisitor {
return &WhereClauseVisitor{
conditionBuilder: conditionBuilder,
fieldKeys: fieldKeys,
builder: builder,
fullTextColumn: fullTextColumn,
}
}
type SyntaxError struct {
line, column int
msg string
}
func (e *SyntaxError) Error() string {
return fmt.Sprintf("line %d:%d %s", e.line, e.column, e.msg)
}
// ErrorListener is a custom error listener to capture syntax errors
type ErrorListener struct {
*antlr.DefaultErrorListener
Errors []error
}
// NewErrorListener creates a new error listener
func NewErrorListener() *ErrorListener {
return &ErrorListener{
DefaultErrorListener: antlr.NewDefaultErrorListener(),
Errors: []error{},
}
}
// SyntaxError captures syntax errors during parsing
func (l *ErrorListener) SyntaxError(recognizer antlr.Recognizer, offendingSymbol any, line, column int, msg string, e antlr.RecognitionException) {
l.Errors = append(l.Errors, &SyntaxError{line: line, column: column, msg: msg})
}
// PrepareWhereClause generates a ClickHouse-compatible WHERE clause from the filter query
func PrepareWhereClause(
query string,
fieldKeys map[string][]telemetrytypes.TelemetryFieldKey,
conditionBuilder qbtypes.ConditionBuilder,
fullTextColumn telemetrytypes.TelemetryFieldKey,
) (string, []any, []error, error) {
// Setup the ANTLR parsing pipeline
input := antlr.NewInputStream(query)
lexer := NewFilterQueryLexer(input)
sb := sqlbuilder.NewSelectBuilder()
visitor := NewWhereClauseVisitor(conditionBuilder, fieldKeys, sb, fullTextColumn)
// Set up error handling
lexerErrorListener := NewErrorListener()
lexer.RemoveErrorListeners()
lexer.AddErrorListener(lexerErrorListener)
tokens := antlr.NewCommonTokenStream(lexer, 0)
parserErrorListener := NewErrorListener()
parser := NewFilterQueryParser(tokens)
parser.RemoveErrorListeners()
parser.AddErrorListener(parserErrorListener)
// Parse the query
tree := parser.Query()
// Handle syntax errors
if len(parserErrorListener.Errors) > 0 {
combinedErrors := errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"found %d syntax errors while parsing the search expression: %v",
len(parserErrorListener.Errors),
parserErrorListener.Errors,
)
return "", nil, nil, combinedErrors
}
// Visit the parse tree with our ClickHouse visitor
cond := visitor.Visit(tree).(string)
if len(visitor.errors) > 0 {
// combine all errors into a single error
combinedErrors := errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"found %d errors while parsing the search expression: %v",
len(visitor.errors),
visitor.errors,
)
return "", nil, nil, combinedErrors
}
whereClause, args := visitor.builder.Where(cond).BuildWithFlavor(sqlbuilder.ClickHouse)
return whereClause, args, visitor.warnings, nil
}
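A minimal sketch of calling PrepareWhereClause, mirroring the logs test further down in this diff (the function name here is hypothetical; the field keys, condition builder, and expected clause are taken from that test):

package parser

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/telemetrylogs"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

func exampleLogsWhereClause() {
	fieldKeys := map[string][]telemetrytypes.TelemetryFieldKey{
		"service.name": {{
			Name:          "service.name",
			Signal:        telemetrytypes.SignalLogs,
			FieldContext:  telemetrytypes.FieldContextResource,
			FieldDataType: telemetrytypes.FieldDataTypeString,
		}},
	}
	// full-text terms fall back to the log body column
	fullTextColumn := telemetrytypes.TelemetryFieldKey{
		Name:          "body",
		Signal:        telemetrytypes.SignalLogs,
		FieldContext:  telemetrytypes.FieldContextLog,
		FieldDataType: telemetrytypes.FieldDataTypeString,
	}
	whereClause, args, warnings, err := PrepareWhereClause(`service.name="redis"`, fieldKeys, telemetrylogs.NewConditionBuilder(), fullTextColumn)
	if err != nil {
		fmt.Println("parse error:", err)
		return
	}
	// whereClause: WHERE (resources_string['service.name'] = ?)   args: [redis]
	fmt.Println(whereClause, args, len(warnings))
}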
// Visit dispatches to the specific visit method based on node type
func (v *WhereClauseVisitor) Visit(tree antlr.ParseTree) any {
// Handle nil nodes to prevent panic
if tree == nil {
return ""
}
switch t := tree.(type) {
case *QueryContext:
return v.VisitQuery(t)
case *ExpressionContext:
return v.VisitExpression(t)
case *OrExpressionContext:
return v.VisitOrExpression(t)
case *AndExpressionContext:
return v.VisitAndExpression(t)
case *UnaryExpressionContext:
return v.VisitUnaryExpression(t)
case *PrimaryContext:
return v.VisitPrimary(t)
case *ComparisonContext:
return v.VisitComparison(t)
case *InClauseContext:
return v.VisitInClause(t)
case *NotInClauseContext:
return v.VisitNotInClause(t)
case *ValueListContext:
return v.VisitValueList(t)
case *FullTextContext:
return v.VisitFullText(t)
case *FunctionCallContext:
return v.VisitFunctionCall(t)
case *FunctionParamListContext:
return v.VisitFunctionParamList(t)
case *FunctionParamContext:
return v.VisitFunctionParam(t)
case *ArrayContext:
return v.VisitArray(t)
case *ValueContext:
return v.VisitValue(t)
case *KeyContext:
return v.VisitKey(t)
default:
return ""
}
}
func (v *WhereClauseVisitor) VisitQuery(ctx *QueryContext) any {
return v.Visit(ctx.Expression())
}
// VisitExpression passes through to the orExpression
func (v *WhereClauseVisitor) VisitExpression(ctx *ExpressionContext) any {
return v.Visit(ctx.OrExpression())
}
// VisitOrExpression handles OR expressions
func (v *WhereClauseVisitor) VisitOrExpression(ctx *OrExpressionContext) any {
andExpressions := ctx.AllAndExpression()
andExpressionConditions := make([]string, len(andExpressions))
for i, expr := range andExpressions {
andExpressionConditions[i] = v.Visit(expr).(string)
}
if len(andExpressionConditions) == 1 {
return andExpressionConditions[0]
}
return v.builder.Or(andExpressionConditions...)
}
// VisitAndExpression handles AND expressions
func (v *WhereClauseVisitor) VisitAndExpression(ctx *AndExpressionContext) any {
unaryExpressions := ctx.AllUnaryExpression()
unaryExpressionConditions := make([]string, len(unaryExpressions))
for i, expr := range unaryExpressions {
unaryExpressionConditions[i] = v.Visit(expr).(string)
}
if len(unaryExpressionConditions) == 1 {
return unaryExpressionConditions[0]
}
return v.builder.And(unaryExpressionConditions...)
}
// VisitUnaryExpression handles NOT expressions
func (v *WhereClauseVisitor) VisitUnaryExpression(ctx *UnaryExpressionContext) any {
result := v.Visit(ctx.Primary()).(string)
// Check if this is a NOT expression
if ctx.NOT() != nil {
return fmt.Sprintf("NOT (%s)", result)
}
return result
}
// VisitPrimary handles grouped expressions, comparisons, function calls, and full-text search
func (v *WhereClauseVisitor) VisitPrimary(ctx *PrimaryContext) any {
if ctx.OrExpression() != nil {
// This is a parenthesized expression
return fmt.Sprintf("(%s)", v.Visit(ctx.OrExpression()).(string))
} else if ctx.Comparison() != nil {
return v.Visit(ctx.Comparison())
} else if ctx.FunctionCall() != nil {
return v.Visit(ctx.FunctionCall())
} else if ctx.FullText() != nil {
return v.Visit(ctx.FullText())
}
// Handle standalone key as a full text search term
if ctx.GetChildCount() == 1 {
child := ctx.GetChild(0)
if keyCtx, ok := child.(*KeyContext); ok {
// create a full text search condition on the body field
keyText := keyCtx.GetText()
cond, err := v.conditionBuilder.GetCondition(context.Background(), &v.fullTextColumn, qbtypes.FilterOperatorRegexp, keyText, v.builder)
if err != nil {
return ""
}
return cond
}
}
return "" // Should not happen with valid input
}
// VisitComparison handles all comparison operators
func (v *WhereClauseVisitor) VisitComparison(ctx *ComparisonContext) any {
keys := v.Visit(ctx.Key()).([]telemetrytypes.TelemetryFieldKey)
// Handle EXISTS specially
if ctx.EXISTS() != nil {
op := qbtypes.FilterOperatorExists
if ctx.NOT() != nil {
op = qbtypes.FilterOperatorNotExists
}
var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.GetCondition(context.Background(), &key, op, nil, v.builder)
if err != nil {
return ""
}
conds = append(conds, condition)
}
return v.builder.Or(conds...)
}
// Handle IN / NOT IN clauses
if ctx.InClause() != nil || ctx.NotInClause() != nil {
op := qbtypes.FilterOperatorIn
var values []any
if ctx.NotInClause() != nil {
op = qbtypes.FilterOperatorNotIn
// visit the NOT IN clause directly; visiting the absent InClause would
// return "" and the []any type assertion would panic
values = v.Visit(ctx.NotInClause()).([]any)
} else {
values = v.Visit(ctx.InClause()).([]any)
}
var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.GetCondition(context.Background(), &key, op, values, v.builder)
if err != nil {
return ""
}
conds = append(conds, condition)
}
return v.builder.Or(conds...)
}
// Handle BETWEEN
if ctx.BETWEEN() != nil {
op := qbtypes.FilterOperatorBetween
if ctx.NOT() != nil {
op = qbtypes.FilterOperatorNotBetween
}
values := ctx.AllValue()
if len(values) != 2 {
return ""
}
value1 := v.Visit(values[0])
value2 := v.Visit(values[1])
var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.GetCondition(context.Background(), &key, op, []any{value1, value2}, v.builder)
if err != nil {
return ""
}
conds = append(conds, condition)
}
return v.builder.Or(conds...)
}
// Get all values for operations that need them
values := ctx.AllValue()
if len(values) > 0 {
value := v.Visit(values[0])
var op qbtypes.FilterOperator
// Handle each type of comparison
if ctx.EQUALS() != nil {
op = qbtypes.FilterOperatorEqual
} else if ctx.NOT_EQUALS() != nil || ctx.NEQ() != nil {
op = qbtypes.FilterOperatorNotEqual
} else if ctx.LT() != nil {
op = qbtypes.FilterOperatorLessThan
} else if ctx.LE() != nil {
op = qbtypes.FilterOperatorLessThanOrEq
} else if ctx.GT() != nil {
op = qbtypes.FilterOperatorGreaterThan
} else if ctx.GE() != nil {
op = qbtypes.FilterOperatorGreaterThanOrEq
} else if ctx.LIKE() != nil {
op = qbtypes.FilterOperatorLike
} else if ctx.ILIKE() != nil {
op = qbtypes.FilterOperatorLike
} else if ctx.NOT_LIKE() != nil {
op = qbtypes.FilterOperatorNotLike
} else if ctx.NOT_ILIKE() != nil {
op = qbtypes.FilterOperatorNotLike
// check the NOT variants first; otherwise the bare REGEXP/CONTAINS branches
// would also match "NOT REGEXP"/"NOT CONTAINS" and the NOT operators would be unreachable
} else if ctx.NOT() != nil && ctx.REGEXP() != nil {
op = qbtypes.FilterOperatorNotRegexp
} else if ctx.REGEXP() != nil {
op = qbtypes.FilterOperatorRegexp
} else if ctx.NOT() != nil && ctx.CONTAINS() != nil {
op = qbtypes.FilterOperatorNotContains
} else if ctx.CONTAINS() != nil {
op = qbtypes.FilterOperatorContains
}
var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.GetCondition(context.Background(), &key, op, value, v.builder)
if err != nil {
return ""
}
conds = append(conds, condition)
}
return v.builder.Or(conds...)
}
return "" // Should not happen with valid input
}
// VisitInClause handles IN expressions
func (v *WhereClauseVisitor) VisitInClause(ctx *InClauseContext) any {
return v.Visit(ctx.ValueList())
}
// VisitNotInClause handles NOT IN expressions
func (v *WhereClauseVisitor) VisitNotInClause(ctx *NotInClauseContext) any {
return v.Visit(ctx.ValueList())
}
// VisitValueList handles comma-separated value lists
func (v *WhereClauseVisitor) VisitValueList(ctx *ValueListContext) any {
values := ctx.AllValue()
parts := []any{}
for _, val := range values {
parts = append(parts, v.Visit(val))
}
return parts
}
// VisitFullText handles standalone quoted strings for full-text search
func (v *WhereClauseVisitor) VisitFullText(ctx *FullTextContext) any {
// remove quotes from the quotedText
quotedText := strings.Trim(ctx.QUOTED_TEXT().GetText(), "\"'")
cond, err := v.conditionBuilder.GetCondition(context.Background(), &v.fullTextColumn, qbtypes.FilterOperatorRegexp, quotedText, v.builder)
if err != nil {
return ""
}
return cond
}
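For reference, the logs tests later in this diff pin down the full-text behaviour for both bare words and quoted phrases; two of those expectations:

// waiting for response   -> WHERE ((match(body, ?)) AND (match(body, ?)) AND (match(body, ?)))  args: [waiting, for, response]
// "waiting for response" -> WHERE (match(body, ?))                                              args: [waiting for response]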
// VisitFunctionCall handles function calls like has(), hasAny(), etc.
func (v *WhereClauseVisitor) VisitFunctionCall(ctx *FunctionCallContext) any {
// Get function name based on which token is present
var functionName string
if ctx.HAS() != nil {
functionName = "has"
} else if ctx.HASANY() != nil {
functionName = "hasAny"
} else if ctx.HASALL() != nil {
functionName = "hasAll"
} else {
// Default fallback
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"unknown function `%s`",
ctx.GetText(),
))
return ""
}
params := v.Visit(ctx.FunctionParamList()).([]any)
if len(params) < 2 {
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"function `%s` expects key and value parameters",
functionName,
))
return ""
}
keys, ok := params[0].([]telemetrytypes.TelemetryFieldKey)
if !ok {
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"function `%s` expects key parameter to be a field key",
functionName,
))
return ""
}
value := params[1:]
var conds []string
for _, key := range keys {
var fieldName string
if strings.HasPrefix(key.Name, telemetrylogs.BodyJSONStringSearchPrefix) {
fieldName, _ = telemetrylogs.GetBodyJSONKey(context.Background(), &key, qbtypes.FilterOperatorUnknown, value)
} else {
fieldName, _ = v.conditionBuilder.GetTableFieldName(context.Background(), &key)
}
var cond string
// Map our functions to ClickHouse equivalents
switch functionName {
case "has":
cond = fmt.Sprintf("has(%s, %s)", fieldName, v.builder.Var(value[0]))
case "hasAny":
cond = fmt.Sprintf("hasAny(%s, %s)", fieldName, v.builder.Var(value))
case "hasAll":
cond = fmt.Sprintf("hasAll(%s, %s)", fieldName, v.builder.Var(value))
}
conds = append(conds, cond)
}
return v.builder.Or(conds...)
}
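The has() mapping above is exercised by the logs tests later in this diff; two of the expected clauses for reference:

// has(service.name, 'redis')             -> WHERE (has(resources_string['service.name'], ?))                                        args: [redis]
// has(body.payload.user_ids[*], 'u1292') -> WHERE (has(JSONExtract(JSON_QUERY(body, '$.payload.user_ids[*]'), 'Array(String)'), ?)) args: [u1292]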
// VisitFunctionParamList handles the parameter list for function calls
func (v *WhereClauseVisitor) VisitFunctionParamList(ctx *FunctionParamListContext) any {
params := ctx.AllFunctionParam()
parts := make([]any, len(params))
for i, param := range params {
parts[i] = v.Visit(param)
}
return parts
}
// VisitFunctionParam handles individual parameters in function calls
func (v *WhereClauseVisitor) VisitFunctionParam(ctx *FunctionParamContext) any {
if ctx.Key() != nil {
return v.Visit(ctx.Key())
} else if ctx.Value() != nil {
return v.Visit(ctx.Value())
} else if ctx.Array() != nil {
return v.Visit(ctx.Array())
}
return "" // Should not happen with valid input
}
// VisitArray handles array literals
func (v *WhereClauseVisitor) VisitArray(ctx *ArrayContext) any {
return v.Visit(ctx.ValueList())
}
// VisitValue handles literal values: strings, numbers, booleans
func (v *WhereClauseVisitor) VisitValue(ctx *ValueContext) any {
if ctx.QUOTED_TEXT() != nil {
txt := ctx.QUOTED_TEXT().GetText()
// trim quotes and return the value
return strings.Trim(txt, "\"'")
} else if ctx.NUMBER() != nil {
number, err := strconv.ParseFloat(ctx.NUMBER().GetText(), 64)
if err != nil {
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"failed to parse number %s",
ctx.NUMBER().GetText(),
))
return ""
}
return number
} else if ctx.BOOL() != nil {
// Convert to ClickHouse boolean literal
boolText := strings.ToLower(ctx.BOOL().GetText())
return boolText == "true"
} else if ctx.KEY() != nil {
// Why do we have a KEY context here?
// When the user writes an expression like `service.name=redis`,
// the `redis` part is in a VALUE context but is parsed as a KEY token,
// so we return the text as is
return ctx.KEY().GetText()
}
return "" // Should not happen with valid input
}
// VisitKey handles field/column references
func (v *WhereClauseVisitor) VisitKey(ctx *KeyContext) any {
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(ctx.KEY().GetText())
keyName := strings.TrimPrefix(fieldKey.Name, telemetrylogs.BodyJSONStringSearchPrefix)
fieldKeysForName := v.fieldKeys[keyName]
// for the body JSON search, we need to add a search on the body field even
// if there is an attribute/resource attribute with the same name.
// Since it will be ORed with fieldKeysForName, the result will not be empty
// when either of them has values
if strings.HasPrefix(fieldKey.Name, telemetrylogs.BodyJSONStringSearchPrefix) {
fieldKeysForName = append(fieldKeysForName, fieldKey)
}
// TODO(srikanthccv): do we want to return an error here?
// should we infer the type and auto-magically build a key for expression?
if len(fieldKeysForName) == 0 {
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"key `%s` not found",
fieldKey.Name,
))
}
if len(fieldKeysForName) > 1 {
// this is a warning state; the key should be unambiguous
v.warnings = append(v.warnings, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"key `%s` is ambiguous, found %d different combinations of field context and data type: %v",
fieldKey.Name,
len(fieldKeysForName),
fieldKeysForName,
))
}
return fieldKeysForName
}
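For reference, the ambiguous-key path above is exercised by the http.status_code test case later in this diff: when the key resolves to both a number and a string attribute, both keys are returned, a warning is recorded, and the per-key conditions are ORed:

// query: http.status_code=200
// where: WHERE (attributes_number['http.status_code'] = ? OR toFloat64OrNull(attributes_string['http.status_code']) = ?)
// args:  [200, 200]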


@@ -0,0 +1,638 @@
package parser
import (
"reflect"
"strings"
"testing"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrytraces"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
func TestConvertToClickHouseLogsQuery(t *testing.T) {
cases := []struct {
name string
fieldKeys map[string][]telemetrytypes.TelemetryFieldKey
query string
expectedSearchString string
expectedSearchArgs []any
}{
{
name: "test-simple-service-name-filter",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"service.name": {
{
Name: "service.name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "service.name=redis",
expectedSearchString: "WHERE (resources_string['service.name'] = ?)",
expectedSearchArgs: []any{"redis"},
},
{
name: "test-simple-service-name-filter-with-materialised-column",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"service.name": {
{
Name: "service.name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
},
},
query: "service.name=redis",
expectedSearchString: "WHERE (resource_string_service$$name = ?)",
expectedSearchArgs: []any{"redis"},
},
{
name: "http-status-code-multiple-data-types",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"http.status_code": {
{
Name: "http.status_code",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
{
Name: "http.status_code",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "http.status_code=200",
expectedSearchString: "WHERE (attributes_number['http.status_code'] = ? OR toFloat64OrNull(attributes_string['http.status_code']) = ?)",
expectedSearchArgs: []any{float64(200), float64(200)},
},
{
name: "http-status-code-multiple-data-types-between-operator",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"http.status_code": {
{
Name: "http.status_code",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
{
Name: "http.status_code",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "http.status_code between 200 and 300",
expectedSearchString: "WHERE (attributes_number['http.status_code'] BETWEEN ? AND ? OR toFloat64OrNull(attributes_string['http.status_code']) BETWEEN ? AND ?)",
expectedSearchArgs: []any{float64(200), float64(300), float64(200), float64(300)},
},
{
name: "response-body-multiple-data-types-string-contains",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"response.body": {
{
Name: "response.body",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
{
Name: "response.body",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "response.body contains error",
expectedSearchString: "WHERE (LOWER(toString(attributes_number['response.body'])) LIKE LOWER(?) OR LOWER(attributes_string['response.body']) LIKE LOWER(?))",
expectedSearchArgs: []any{"%error%", "%error%"},
},
{
name: "search-on-top-level-key",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"severity_text": {
{
Name: "severity_text",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "severity_text=error",
expectedSearchString: "WHERE (severity_text = ?)",
expectedSearchArgs: []any{"error"},
},
{
name: "search-on-top-level-key-conflict-with-attribute",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"severity_text": {
{
Name: "severity_text",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
Name: "severity_text",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "severity_text=error",
expectedSearchString: "WHERE (severity_text = ? OR attributes_string['severity_text'] = ?)",
expectedSearchArgs: []any{"error", "error"},
},
{
name: "collision-with-attribute-field-and-resource-attribute",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"k8s.namespace.name": {
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "k8s.namespace.name=test",
expectedSearchString: "WHERE (resources_string['k8s.namespace.name'] = ? OR attributes_string['k8s.namespace.name'] = ?)",
expectedSearchArgs: []any{"test", "test"},
},
{
name: "collision-with-attribute-field-and-resource-attribute-materialised-column",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"k8s.namespace.name": {
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "k8s.namespace.name=test",
expectedSearchString: "WHERE (resource_string_k8s$$namespace$$name = ? OR attributes_string['k8s.namespace.name'] = ?)",
expectedSearchArgs: []any{"test", "test"},
},
{
name: "boolean-collision-with-attribute-field-and-data-type-boolean",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"did_user_login": {
{
Name: "did_user_login",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
{
Name: "did_user_login",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "did_user_login=true",
expectedSearchString: "WHERE (attributes_bool['did_user_login'] = ? OR attributes_string['did_user_login'] = ?)",
expectedSearchArgs: []any{true, "true"},
},
{
name: "regexp-search",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"k8s.namespace.name": {
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"service.name": {
{
Name: "service.name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "k8s.namespace.name REGEXP 'test' OR service.name='redis'",
expectedSearchString: "WHERE (((match(attributes_string['k8s.namespace.name'], ?))) OR (resources_string['service.name'] = ?))",
expectedSearchArgs: []any{"test", "redis"},
},
{
name: "full-text-search-multiple-words",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{},
query: "waiting for response",
expectedSearchString: "WHERE ((match(body, ?)) AND (match(body, ?)) AND (match(body, ?)))",
expectedSearchArgs: []any{"waiting", "for", "response"},
},
{
name: "full-text-search-with-phrase-search",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{},
query: `"waiting for response"`,
expectedSearchString: "WHERE (match(body, ?))",
expectedSearchArgs: []any{"waiting for response"},
},
{
name: "full-text-search-with-word-and-not-word",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{},
query: "error NOT buggy_app",
expectedSearchString: "WHERE ((match(body, ?)) AND NOT ((match(body, ?))))",
expectedSearchArgs: []any{"error", "buggy_app"},
},
{
name: "full-text-search-with-word-and-not-word-and-not-word",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{},
query: "error NOT buggy_app NOT redis",
expectedSearchString: "WHERE ((match(body, ?)) AND NOT ((match(body, ?))) AND NOT ((match(body, ?))))",
expectedSearchArgs: []any{"error", "buggy_app", "redis"},
},
{
name: "full-text-search-with-word-and-not-word-and-not-word-tricky",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{},
query: "error NOT buggy_app OR redis",
expectedSearchString: "WHERE (((match(body, ?)) AND NOT ((match(body, ?)))) OR (match(body, ?)))",
expectedSearchArgs: []any{"error", "buggy_app", "redis"},
},
{
name: "has-function",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"service.name": {
{
Name: "service.name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextResource,
},
},
"payload.user_ids": {
{
Name: "payload.user_ids",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
},
},
},
query: "has(service.name, 'redis')",
expectedSearchString: "WHERE (has(resources_string['service.name'], ?))",
expectedSearchArgs: []any{"redis"},
},
{
name: "has-from-list-of-values",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{},
query: "has(body.payload.user_ids[*], 'u1292')",
expectedSearchString: "WHERE (has(JSONExtract(JSON_QUERY(body, '$.payload.user_ids[*]'), 'Array(String)'), ?))",
expectedSearchArgs: []any{"u1292"},
},
{
name: "body-json-search-that-also-has-attribute-with-same-name",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"http.status_code": {
{
Name: "http.status_code",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
Materialized: true,
},
},
},
query: "body.http.status_code=200",
expectedSearchString: "WHERE (attribute_number_http$$status_code = ? OR JSONExtract(JSON_VALUE(body, '$.http.status_code'), 'Float64') = ?)",
expectedSearchArgs: []any{float64(200), float64(200)},
},
}
for _, c := range cases {
t.Logf("running test %s", c.name)
chQuery, chQueryArgs, _, err := PrepareWhereClause(c.query, c.fieldKeys, telemetrylogs.NewConditionBuilder(), telemetrytypes.TelemetryFieldKey{
Name: "body",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
})
if err != nil {
t.Errorf("Error converting query to ClickHouse: %v", err)
}
if chQuery != c.expectedSearchString {
t.Errorf("Expected %s, got %s", c.expectedSearchString, chQuery)
}
if !reflect.DeepEqual(chQueryArgs, c.expectedSearchArgs) {
for i, arg := range chQueryArgs {
t.Logf("Expected %v with type %T, got %v with type %T\n", c.expectedSearchArgs[i], c.expectedSearchArgs[i], arg, arg)
}
t.Errorf("Expected %v, got %v", c.expectedSearchArgs, chQueryArgs)
}
}
}
func TestConvertToClickHouseSpansQuery(t *testing.T) {
cases := []struct {
name string
fieldKeys map[string][]telemetrytypes.TelemetryFieldKey
query string
expectedSearchString string
expectedSearchArgs []any
}{
{
name: "test-simple-service-name-filter",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"service.name": {
{
Name: "service.name",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "service.name=redis",
expectedSearchString: "WHERE (resources_string['service.name'] = ?)",
expectedSearchArgs: []any{"redis"},
},
{
name: "test-simple-service-name-filter-with-materialised-column",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"service.name": {
{
Name: "service.name",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
},
},
query: "service.name=redis",
expectedSearchString: "WHERE (resource_string_service$$name = ?)",
expectedSearchArgs: []any{"redis"},
},
{
name: "http-status-code-multiple-data-types",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"http.status_code": {
{
Name: "http.status_code",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
{
Name: "http.status_code",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "http.status_code=200",
expectedSearchString: "WHERE (attributes_number['http.status_code'] = ? OR toFloat64OrNull(attributes_string['http.status_code']) = ?)",
expectedSearchArgs: []any{float64(200), float64(200)},
},
{
name: "http-status-code-multiple-data-types-between-operator",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"http.status_code": {
{
Name: "http.status_code",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
{
Name: "http.status_code",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "http.status_code between 200 and 300",
expectedSearchString: "WHERE (attributes_number['http.status_code'] BETWEEN ? AND ? OR toFloat64OrNull(attributes_string['http.status_code']) BETWEEN ? AND ?)",
expectedSearchArgs: []any{float64(200), float64(300), float64(200), float64(300)},
},
{
name: "response-body-multiple-data-types-string-contains",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"response.body": {
{
Name: "response.body",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
{
Name: "response.body",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "response.body contains error",
expectedSearchString: "WHERE (LOWER(toString(attributes_number['response.body'])) LIKE LOWER(?) OR LOWER(attributes_string['response.body']) LIKE LOWER(?))",
expectedSearchArgs: []any{"%error%", "%error%"},
},
{
name: "collision-with-attribute-field-and-resource-attribute",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"k8s.namespace.name": {
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "k8s.namespace.name=test",
expectedSearchString: "WHERE (resources_string['k8s.namespace.name'] = ? OR attributes_string['k8s.namespace.name'] = ?)",
expectedSearchArgs: []any{"test", "test"},
},
{
name: "collision-with-attribute-field-and-resource-attribute-materialised-column",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"k8s.namespace.name": {
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "k8s.namespace.name=test",
expectedSearchString: "WHERE (resource_string_k8s$$namespace$$name = ? OR attributes_string['k8s.namespace.name'] = ?)",
expectedSearchArgs: []any{"test", "test"},
},
{
name: "boolean-collision-with-attribute-field-and-data-type-boolean",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"did_user_login": {
{
Name: "did_user_login",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
{
Name: "did_user_login",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "did_user_login=true",
expectedSearchString: "WHERE (attributes_bool['did_user_login'] = ? OR attributes_string['did_user_login'] = ?)",
expectedSearchArgs: []any{true, "true"},
},
{
name: "regexp-search",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"k8s.namespace.name": {
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"service.name": {
{
Name: "service.name",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "k8s.namespace.name REGEXP 'test' OR service.name='redis'",
expectedSearchString: "WHERE (((match(attributes_string['k8s.namespace.name'], ?))) OR (resources_string['service.name'] = ?))",
expectedSearchArgs: []any{"test", "redis"},
},
}
for _, c := range cases {
chQuery, chQueryArgs, _, err := PrepareWhereClause(c.query, c.fieldKeys, telemetrytraces.NewConditionBuilder(), telemetrytypes.TelemetryFieldKey{
Name: "dummy",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
})
if err != nil {
t.Errorf("Error converting query to ClickHouse: %v", err)
}
if chQuery != c.expectedSearchString {
t.Errorf("Expected %s, got %s", c.expectedSearchString, chQuery)
}
if !reflect.DeepEqual(chQueryArgs, c.expectedSearchArgs) {
for i, arg := range chQueryArgs {
t.Logf("Expected %v with type %T, got %v with type %T\n", c.expectedSearchArgs[i], c.expectedSearchArgs[i], arg, arg)
}
t.Errorf("Expected %v, got %v", c.expectedSearchArgs, chQueryArgs)
}
}
}
func TestConvertToClickHouseSpansQueryWithErrors(t *testing.T) {
cases := []struct {
name string
fieldKeys map[string][]telemetrytypes.TelemetryFieldKey
query string
expectedSearchString string
expectedSearchArgs []any
expectedErrorSubString string
expectedWarnings []error
}{
{
name: "has-function-with-multiple-values",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{},
query: "key.that.does.not.exist = 'redis'",
expectedSearchString: "",
expectedSearchArgs: []any{},
expectedErrorSubString: "key `key.that.does.not.exist` not found",
expectedWarnings: []error{},
},
{
name: "unknown-function",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{},
query: "unknown.function()",
expectedSearchString: "",
expectedSearchArgs: []any{},
expectedErrorSubString: "expecting {'(', NOT, HAS, HASANY, HASALL, QUOTED_TEXT, KEY, FREETEXT}",
expectedWarnings: []error{},
},
{
name: "has-function-not-enough-params",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{},
query: "has(key.that.does.not.exist)",
expectedSearchString: "",
expectedSearchArgs: []any{},
expectedErrorSubString: "function `has` expects key and value parameters",
expectedWarnings: []error{},
},
}
for _, c := range cases {
_, _, warnings, err := PrepareWhereClause(c.query, c.fieldKeys, telemetrytraces.NewConditionBuilder(), telemetrytypes.TelemetryFieldKey{
Name: "dummy",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
})
if err != nil {
if !strings.Contains(err.Error(), c.expectedErrorSubString) {
t.Errorf("Expected error %v, got %v", c.expectedErrorSubString, err)
}
}
if len(warnings) != len(c.expectedWarnings) {
t.Errorf("Expected %d warnings, got %d", len(c.expectedWarnings), len(warnings))
}
for i, warning := range warnings {
if warning.Error() != c.expectedWarnings[i].Error() {
t.Errorf("Expected warning %d to be %v, got %v", i, c.expectedWarnings[i], warning)
}
}
}
}


@@ -1085,7 +1085,7 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Con
item.Attributes_string[k] = fmt.Sprintf("%v", v)
}
for k, v := range item.Attributes_number {
item.Attributes_string[k] = fmt.Sprintf("%v", v)
item.Attributes_string[k] = strconv.FormatFloat(v, 'f', -1, 64)
}
for k, v := range item.Resources_string {
item.Attributes_string[k] = v
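A minimal standalone illustration (standard library only) of why these hunks move from fmt.Sprintf("%v", v) to strconv.FormatFloat for numeric attributes:

package main

import (
	"fmt"
	"strconv"
)

func main() {
	v := 20000000.0 // e.g. a large numeric span/log attribute
	fmt.Printf("%v\n", v)                            // 2e+07 — %v falls back to %g and switches to scientific notation
	fmt.Println(strconv.FormatFloat(v, 'f', -1, 64)) // 20000000 — plain decimal, shortest exact representation
}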
@@ -3928,11 +3928,16 @@ func (r *ClickHouseReader) GetLogAttributeKeys(ctx context.Context, req *v3.Filt
var rows driver.Rows
var response v3.FilterAttributeKeyResponse
tagTypeFilter := `tag_type != 'logfield'`
if req.TagType != "" {
tagTypeFilter = fmt.Sprintf(`tag_type != 'logfield' and tag_type = '%s'`, req.TagType)
}
if len(req.SearchText) != 0 {
query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where tag_type != 'logfield' and tag_key ILIKE $1 limit $2", r.logsDB, r.logsTagAttributeTableV2)
query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where %s and tag_key ILIKE $1 limit $2", r.logsDB, r.logsTagAttributeTableV2, tagTypeFilter)
rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", req.SearchText), req.Limit)
} else {
query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where tag_type != 'logfield' limit $1", r.logsDB, r.logsTagAttributeTableV2)
query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where %s limit $1", r.logsDB, r.logsTagAttributeTableV2, tagTypeFilter)
rows, err = r.db.Query(ctx, query, req.Limit)
}
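For illustration, the two filter shapes this branch builds when SearchText is set, using a hypothetical TagType value of "tag" (the concrete values req.TagType can take are not shown in this diff):

// req.TagType == "":    select distinct tag_key, tag_type, tag_data_type from <db>.<table> where tag_type != 'logfield' and tag_key ILIKE $1 limit $2
// req.TagType == "tag": select distinct tag_key, tag_type, tag_data_type from <db>.<table> where tag_type != 'logfield' and tag_type = 'tag' and tag_key ILIKE $1 limit $2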
@@ -3967,13 +3972,16 @@ func (r *ClickHouseReader) GetLogAttributeKeys(ctx context.Context, req *v3.Filt
response.AttributeKeys = append(response.AttributeKeys, key)
}
// add other attributes
for _, f := range constants.StaticFieldsLogsV3 {
if (v3.AttributeKey{} == f) {
continue
}
if len(req.SearchText) == 0 || strings.Contains(f.Key, req.SearchText) {
response.AttributeKeys = append(response.AttributeKeys, f)
// add other attributes only when the tagType is not specified,
// i.e. when retrieving all attributes
if req.TagType == "" {
for _, f := range constants.StaticFieldsLogsV3 {
if (v3.AttributeKey{} == f) {
continue
}
if len(req.SearchText) == 0 || strings.Contains(f.Key, req.SearchText) {
response.AttributeKeys = append(response.AttributeKeys, f)
}
}
}
@@ -4233,11 +4241,12 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
isValidPoint = true
point.Value = float64(reflect.ValueOf(v).Elem().Float())
} else {
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float()))
val := strconv.FormatFloat(reflect.ValueOf(v).Elem().Float(), 'f', -1, 64)
groupBy = append(groupBy, val)
if _, ok := groupAttributes[colName]; !ok {
groupAttributesArray = append(groupAttributesArray, map[string]string{colName: fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float())})
groupAttributesArray = append(groupAttributesArray, map[string]string{colName: val})
}
groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float())
groupAttributes[colName] = val
}
case **float64, **float32:
val := reflect.ValueOf(v)
@@ -4247,11 +4256,12 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
isValidPoint = true
point.Value = value
} else {
groupBy = append(groupBy, fmt.Sprintf("%v", value))
val := strconv.FormatFloat(value, 'f', -1, 64)
groupBy = append(groupBy, val)
if _, ok := groupAttributes[colName]; !ok {
groupAttributesArray = append(groupAttributesArray, map[string]string{colName: fmt.Sprintf("%v", value)})
groupAttributesArray = append(groupAttributesArray, map[string]string{colName: val})
}
groupAttributes[colName] = fmt.Sprintf("%v", value)
groupAttributes[colName] = val
}
}
case *uint, *uint8, *uint64, *uint16, *uint32:
@@ -4715,7 +4725,12 @@ func (r *ClickHouseReader) GetTraceAttributeKeys(ctx context.Context, req *v3.Fi
var rows driver.Rows
var response v3.FilterAttributeKeyResponse
query = fmt.Sprintf("SELECT DISTINCT(tag_key), tag_type, tag_data_type FROM %s.%s WHERE tag_key ILIKE $1 and tag_type != 'spanfield' LIMIT $2", r.TraceDB, r.spanAttributeTableV2)
tagTypeFilter := `tag_type != 'spanfield'`
if req.TagType != "" {
tagTypeFilter = fmt.Sprintf(`tag_type != 'spanfield' and tag_type = '%s'`, req.TagType)
}
query = fmt.Sprintf("SELECT DISTINCT(tag_key), tag_type, tag_data_type FROM %s.%s WHERE tag_key ILIKE $1 and %s LIMIT $2", r.TraceDB, r.spanAttributeTableV2, tagTypeFilter)
rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", req.SearchText), req.Limit)
@@ -4760,13 +4775,16 @@ func (r *ClickHouseReader) GetTraceAttributeKeys(ctx context.Context, req *v3.Fi
fields = constants.DeprecatedStaticFieldsTraces
}
// add the new static fields
for _, f := range fields {
if (v3.AttributeKey{} == f) {
continue
}
if len(req.SearchText) == 0 || strings.Contains(f.Key, req.SearchText) {
response.AttributeKeys = append(response.AttributeKeys, f)
// add the new static fields only when the tagType is not specified,
// i.e. when retrieving all attributes
if req.TagType == "" {
for _, f := range fields {
if (v3.AttributeKey{} == f) {
continue
}
if len(req.SearchText) == 0 || strings.Contains(f.Key, req.SearchText) {
response.AttributeKeys = append(response.AttributeKeys, f)
}
}
}
@@ -6873,7 +6891,7 @@ func (r *ClickHouseReader) SearchTracesV2(ctx context.Context, params *model.Sea
item.Attributes_string[k] = fmt.Sprintf("%v", v)
}
for k, v := range item.Attributes_number {
item.Attributes_string[k] = fmt.Sprintf("%v", v)
item.Attributes_string[k] = strconv.FormatFloat(v, 'f', -1, 64)
}
for k, v := range item.Resources_string {
item.Attributes_string[k] = v


@@ -8,68 +8,59 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/google/uuid"
"github.com/jmoiron/sqlx"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
)
type cloudProviderAccountsRepository interface {
listConnected(ctx context.Context, cloudProvider string) ([]AccountRecord, *model.ApiError)
listConnected(ctx context.Context, orgId string, provider string) ([]types.CloudIntegration, *model.ApiError)
get(ctx context.Context, cloudProvider string, id string) (*AccountRecord, *model.ApiError)
get(ctx context.Context, orgId string, provider string, id string) (*types.CloudIntegration, *model.ApiError)
getConnectedCloudAccount(
ctx context.Context, cloudProvider string, cloudAccountId string,
) (*AccountRecord, *model.ApiError)
getConnectedCloudAccount(ctx context.Context, orgId string, provider string, accountID string) (*types.CloudIntegration, *model.ApiError)
// Insert an account or update it by (cloudProvider, id)
// for specified non-empty fields
upsert(
ctx context.Context,
cloudProvider string,
orgId string,
provider string,
id *string,
config *AccountConfig,
cloudAccountId *string,
agentReport *AgentReport,
config *types.AccountConfig,
accountId *string,
agentReport *types.AgentReport,
removedAt *time.Time,
) (*AccountRecord, *model.ApiError)
) (*types.CloudIntegration, *model.ApiError)
}
func newCloudProviderAccountsRepository(db *sqlx.DB) (
func newCloudProviderAccountsRepository(store sqlstore.SQLStore) (
*cloudProviderAccountsSQLRepository, error,
) {
return &cloudProviderAccountsSQLRepository{
db: db,
store: store,
}, nil
}
type cloudProviderAccountsSQLRepository struct {
db *sqlx.DB
store sqlstore.SQLStore
}
func (r *cloudProviderAccountsSQLRepository) listConnected(
ctx context.Context, cloudProvider string,
) ([]AccountRecord, *model.ApiError) {
accounts := []AccountRecord{}
ctx context.Context, orgId string, cloudProvider string,
) ([]types.CloudIntegration, *model.ApiError) {
accounts := []types.CloudIntegration{}
err := r.store.BunDB().NewSelect().
Model(&accounts).
Where("org_id = ?", orgId).
Where("provider = ?", cloudProvider).
Where("removed_at is NULL").
Where("account_id is not NULL").
Where("last_agent_report is not NULL").
Order("created_at").
Scan(ctx)
err := r.db.SelectContext(
ctx, &accounts, `
select
cloud_provider,
id,
config_json,
cloud_account_id,
last_agent_report_json,
created_at,
removed_at
from cloud_integrations_accounts
where
cloud_provider=$1
and removed_at is NULL
and cloud_account_id is not NULL
and last_agent_report_json is not NULL
order by created_at
`, cloudProvider,
)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not query connected cloud accounts: %w", err,
@@ -80,27 +71,16 @@ func (r *cloudProviderAccountsSQLRepository) listConnected(
}
func (r *cloudProviderAccountsSQLRepository) get(
ctx context.Context, cloudProvider string, id string,
) (*AccountRecord, *model.ApiError) {
var result AccountRecord
ctx context.Context, orgId string, provider string, id string,
) (*types.CloudIntegration, *model.ApiError) {
var result types.CloudIntegration
err := r.db.GetContext(
ctx, &result, `
select
cloud_provider,
id,
config_json,
cloud_account_id,
last_agent_report_json,
created_at,
removed_at
from cloud_integrations_accounts
where
cloud_provider=$1
and id=$2
`,
cloudProvider, id,
)
err := r.store.BunDB().NewSelect().
Model(&result).
Where("org_id = ?", orgId).
Where("provider = ?", provider).
Where("id = ?", id).
Scan(ctx)
if err == sql.ErrNoRows {
return nil, model.NotFoundError(fmt.Errorf(
@@ -116,33 +96,22 @@ func (r *cloudProviderAccountsSQLRepository) get(
}
func (r *cloudProviderAccountsSQLRepository) getConnectedCloudAccount(
ctx context.Context, cloudProvider string, cloudAccountId string,
) (*AccountRecord, *model.ApiError) {
var result AccountRecord
ctx context.Context, orgId string, provider string, accountId string,
) (*types.CloudIntegration, *model.ApiError) {
var result types.CloudIntegration
err := r.db.GetContext(
ctx, &result, `
select
cloud_provider,
id,
config_json,
cloud_account_id,
last_agent_report_json,
created_at,
removed_at
from cloud_integrations_accounts
where
cloud_provider=$1
and cloud_account_id=$2
and last_agent_report_json is not NULL
and removed_at is NULL
`,
cloudProvider, cloudAccountId,
)
err := r.store.BunDB().NewSelect().
Model(&result).
Where("org_id = ?", orgId).
Where("provider = ?", provider).
Where("account_id = ?", accountId).
Where("last_agent_report is not NULL").
Where("removed_at is NULL").
Scan(ctx)
if err == sql.ErrNoRows {
return nil, model.NotFoundError(fmt.Errorf(
"couldn't find connected cloud account %s", cloudAccountId,
"couldn't find connected cloud account %s", accountId,
))
} else if err != nil {
return nil, model.InternalError(fmt.Errorf(
@@ -155,17 +124,18 @@ func (r *cloudProviderAccountsSQLRepository) getConnectedCloudAccount(
func (r *cloudProviderAccountsSQLRepository) upsert(
ctx context.Context,
cloudProvider string,
orgId string,
provider string,
id *string,
config *AccountConfig,
cloudAccountId *string,
agentReport *AgentReport,
config *types.AccountConfig,
accountId *string,
agentReport *types.AgentReport,
removedAt *time.Time,
) (*AccountRecord, *model.ApiError) {
) (*types.CloudIntegration, *model.ApiError) {
// Insert
if id == nil {
newId := uuid.NewString()
id = &newId
temp := valuer.GenerateUUID().StringValue()
id = &temp
}
// Prepare clause for setting values in `on conflict do update`
@@ -176,19 +146,19 @@ func (r *cloudProviderAccountsSQLRepository) upsert(
if config != nil {
onConflictSetStmts = append(
onConflictSetStmts, setColStatement("config_json"),
onConflictSetStmts, setColStatement("config"),
)
}
if cloudAccountId != nil {
if accountId != nil {
onConflictSetStmts = append(
onConflictSetStmts, setColStatement("cloud_account_id"),
onConflictSetStmts, setColStatement("account_id"),
)
}
if agentReport != nil {
onConflictSetStmts = append(
onConflictSetStmts, setColStatement("last_agent_report_json"),
onConflictSetStmts, setColStatement("last_agent_report"),
)
}
@@ -198,37 +168,45 @@ func (r *cloudProviderAccountsSQLRepository) upsert(
)
}
// set updated_at to current timestamp if it's an upsert
onConflictSetStmts = append(
onConflictSetStmts, setColStatement("updated_at"),
)
onConflictClause := ""
if len(onConflictSetStmts) > 0 {
onConflictClause = fmt.Sprintf(
"on conflict(cloud_provider, id) do update SET\n%s",
"conflict(id, provider, org_id) do update SET\n%s",
strings.Join(onConflictSetStmts, ",\n"),
)
}
insertQuery := fmt.Sprintf(`
INSERT INTO cloud_integrations_accounts (
cloud_provider,
id,
config_json,
cloud_account_id,
last_agent_report_json,
removed_at
) values ($1, $2, $3, $4, $5, $6)
%s`, onConflictClause,
)
integration := types.CloudIntegration{
OrgID: orgId,
Provider: provider,
Identifiable: types.Identifiable{ID: valuer.MustNewUUID(*id)},
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
Config: config,
AccountID: accountId,
LastAgentReport: agentReport,
RemovedAt: removedAt,
}
_, dbErr := r.store.BunDB().NewInsert().
Model(&integration).
On(onConflictClause).
Exec(ctx)
_, dbErr := r.db.ExecContext(
ctx, insertQuery,
cloudProvider, id, config, cloudAccountId, agentReport, removedAt,
)
if dbErr != nil {
return nil, model.InternalError(fmt.Errorf(
"could not upsert cloud account record: %w", dbErr,
))
}
upsertedAccount, apiErr := r.get(ctx, cloudProvider, *id)
upsertedAccount, apiErr := r.get(ctx, orgId, provider, *id)
if apiErr != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't fetch upserted account by id: %w", apiErr.ToError(),

View File

@@ -1,4 +1,4 @@
package services
package cloudintegrations
import (
"bytes"
@@ -15,11 +15,11 @@ import (
"golang.org/x/exp/maps"
)
func List(
func listCloudProviderServices(
cloudProvider string,
) ([]Definition, *model.ApiError) {
cloudServices, found := supportedServices[cloudProvider]
if !found || cloudServices == nil {
) ([]CloudServiceDetails, *model.ApiError) {
cloudServices := availableServices[cloudProvider]
if cloudServices == nil {
return nil, model.NotFoundError(fmt.Errorf(
"unsupported cloud provider: %s", cloudProvider,
))
@@ -33,21 +33,10 @@ func List(
return services, nil
}
func Map(cloudprovider string) (map[string]Definition, *model.ApiError) {
cloudServices, found := supportedServices[cloudprovider]
if !found || cloudServices == nil {
return nil, model.NotFoundError(fmt.Errorf(
"unsupported cloud provider: %s", cloudprovider,
))
}
return cloudServices, nil
}
func GetServiceDefinition(
func getCloudProviderService(
cloudProvider string, serviceId string,
) (*Definition, *model.ApiError) {
cloudServices := supportedServices[cloudProvider]
) (*CloudServiceDetails, *model.ApiError) {
cloudServices := availableServices[cloudProvider]
if cloudServices == nil {
return nil, model.NotFoundError(fmt.Errorf(
"unsupported cloud provider: %s", cloudProvider,
@@ -68,7 +57,7 @@ func GetServiceDefinition(
// Service details read from ./serviceDefinitions
// { "providerName": { "service_id": {...}} }
var supportedServices map[string]map[string]Definition
var availableServices map[string]map[string]CloudServiceDetails
func init() {
err := readAllServiceDefinitions()
@@ -79,15 +68,15 @@ func init() {
}
}
//go:embed definitions/*
var definitionFiles embed.FS
//go:embed serviceDefinitions/*
var serviceDefinitionFiles embed.FS
func readAllServiceDefinitions() error {
supportedServices = map[string]map[string]Definition{}
availableServices = map[string]map[string]CloudServiceDetails{}
rootDirName := "definitions"
rootDirName := "serviceDefinitions"
cloudProviderDirs, err := fs.ReadDir(definitionFiles, rootDirName)
cloudProviderDirs, err := fs.ReadDir(serviceDefinitionFiles, rootDirName)
if err != nil {
return fmt.Errorf("couldn't read dirs in %s: %w", rootDirName, err)
}
@@ -109,21 +98,21 @@ func readAllServiceDefinitions() error {
return fmt.Errorf("no %s services could be read", cloudProvider)
}
supportedServices[cloudProvider] = cloudServices
availableServices[cloudProvider] = cloudServices
}
return nil
}
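// The loader above expects an embedded layout along these lines (illustrative;
// "aws", "ec2" and "rds" are example directory names, not a guaranteed list):
//
//	serviceDefinitions/
//	└── aws/
//	    ├── ec2/
//	    │   └── integration.json   // plus any assets referenced via file:// URIs
//	    └── rds/
//	        └── integration.json
//
// Each integration.json is hydrated (file:// URIs resolved relative to its own
// directory) and parsed into a CloudServiceDetails entry for that provider.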
func readServiceDefinitionsFromDir(cloudProvider string, cloudProviderDirPath string) (
map[string]Definition, error,
map[string]CloudServiceDetails, error,
) {
svcDefDirs, err := fs.ReadDir(definitionFiles, cloudProviderDirPath)
svcDefDirs, err := fs.ReadDir(serviceDefinitionFiles, cloudProviderDirPath)
if err != nil {
return nil, fmt.Errorf("couldn't list integrations dirs: %w", err)
}
svcDefs := map[string]Definition{}
svcDefs := map[string]CloudServiceDetails{}
for _, d := range svcDefDirs {
if !d.IsDir() {
@@ -148,10 +137,10 @@ func readServiceDefinitionsFromDir(cloudProvider string, cloudProviderDirPath st
return svcDefs, nil
}
func readServiceDefinition(cloudProvider string, svcDirpath string) (*Definition, error) {
func readServiceDefinition(cloudProvider string, svcDirpath string) (*CloudServiceDetails, error) {
integrationJsonPath := path.Join(svcDirpath, "integration.json")
serializedSpec, err := definitionFiles.ReadFile(integrationJsonPath)
serializedSpec, err := serviceDefinitionFiles.ReadFile(integrationJsonPath)
if err != nil {
return nil, fmt.Errorf(
"couldn't find integration.json in %s: %w",
@@ -168,7 +157,7 @@ func readServiceDefinition(cloudProvider string, svcDirpath string) (*Definition
}
hydrated, err := integrations.HydrateFileUris(
integrationSpec, definitionFiles, svcDirpath,
integrationSpec, serviceDefinitionFiles, svcDirpath,
)
if err != nil {
return nil, fmt.Errorf(
@@ -178,7 +167,7 @@ func readServiceDefinition(cloudProvider string, svcDirpath string) (*Definition
}
hydratedSpec := hydrated.(map[string]any)
serviceDef, err := ParseStructWithJsonTagsFromMap[Definition](hydratedSpec)
serviceDef, err := ParseStructWithJsonTagsFromMap[CloudServiceDetails](hydratedSpec)
if err != nil {
return nil, fmt.Errorf(
"couldn't parse hydrated JSON spec read from %s: %w",
@@ -191,13 +180,13 @@ func readServiceDefinition(cloudProvider string, svcDirpath string) (*Definition
return nil, fmt.Errorf("invalid service definition %s: %w", serviceDef.Id, err)
}
serviceDef.Strategy.Provider = cloudProvider
serviceDef.TelemetryCollectionStrategy.Provider = cloudProvider
return serviceDef, nil
}
func validateServiceDefinition(s *Definition) error {
func validateServiceDefinition(s *CloudServiceDetails) error {
// Validate dashboard data
seenDashboardIds := map[string]interface{}{}
for _, dd := range s.Assets.Dashboards {
@@ -207,7 +196,7 @@ func validateServiceDefinition(s *Definition) error {
seenDashboardIds[dd.Id] = nil
}
if s.Strategy == nil {
if s.TelemetryCollectionStrategy == nil {
return fmt.Errorf("telemetry_collection_strategy is required")
}

View File

@@ -1,4 +1,4 @@
package services
package cloudintegrations
import (
"testing"
@@ -11,22 +11,22 @@ func TestAvailableServices(t *testing.T) {
require := require.New(t)
// should be able to list available services.
_, apiErr := List("bad-cloud-provider")
_, apiErr := listCloudProviderServices("bad-cloud-provider")
require.NotNil(apiErr)
require.Equal(model.ErrorNotFound, apiErr.Type())
awsSvcs, apiErr := List("aws")
awsSvcs, apiErr := listCloudProviderServices("aws")
require.Nil(apiErr)
require.Greater(len(awsSvcs), 0)
// should be able to get details of a service
_, apiErr = GetServiceDefinition(
_, apiErr = getCloudProviderService(
"aws", "bad-service-id",
)
require.NotNil(apiErr)
require.Equal(model.ErrorNotFound, apiErr.Type())
svc, apiErr := GetServiceDefinition(
svc, apiErr := getCloudProviderService(
"aws", awsSvcs[0].Id,
)
require.Nil(apiErr)

View File

@@ -8,7 +8,6 @@ import (
"strings"
"time"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
@@ -28,18 +27,18 @@ func validateCloudProviderName(name string) *model.ApiError {
type Controller struct {
accountsRepo cloudProviderAccountsRepository
serviceConfigRepo ServiceConfigDatabase
serviceConfigRepo serviceConfigRepository
}
func NewController(sqlStore sqlstore.SQLStore) (
*Controller, error,
) {
accountsRepo, err := newCloudProviderAccountsRepository(sqlStore.SQLxDB())
accountsRepo, err := newCloudProviderAccountsRepository(sqlStore)
if err != nil {
return nil, fmt.Errorf("couldn't create cloud provider accounts repo: %w", err)
}
serviceConfigRepo, err := newServiceConfigRepository(sqlStore.SQLxDB())
serviceConfigRepo, err := newServiceConfigRepository(sqlStore)
if err != nil {
return nil, fmt.Errorf("couldn't create cloud provider service config repo: %w", err)
}
@@ -50,19 +49,12 @@ func NewController(sqlStore sqlstore.SQLStore) (
}, nil
}
type Account struct {
Id string `json:"id"`
CloudAccountId string `json:"cloud_account_id"`
Config AccountConfig `json:"config"`
Status AccountStatus `json:"status"`
}
type ConnectedAccountsListResponse struct {
Accounts []Account `json:"accounts"`
Accounts []types.Account `json:"accounts"`
}
func (c *Controller) ListConnectedAccounts(
ctx context.Context, cloudProvider string,
ctx context.Context, orgId string, cloudProvider string,
) (
*ConnectedAccountsListResponse, *model.ApiError,
) {
@@ -70,14 +62,14 @@ func (c *Controller) ListConnectedAccounts(
return nil, apiErr
}
accountRecords, apiErr := c.accountsRepo.listConnected(ctx, cloudProvider)
accountRecords, apiErr := c.accountsRepo.listConnected(ctx, orgId, cloudProvider)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't list cloud accounts")
}
connectedAccounts := []Account{}
connectedAccounts := []types.Account{}
for _, a := range accountRecords {
connectedAccounts = append(connectedAccounts, a.account())
connectedAccounts = append(connectedAccounts, a.Account())
}
return &ConnectedAccountsListResponse{
@@ -89,7 +81,7 @@ type GenerateConnectionUrlRequest struct {
// Optional. To be specified for updates.
AccountId *string `json:"account_id,omitempty"`
AccountConfig AccountConfig `json:"account_config"`
AccountConfig types.AccountConfig `json:"account_config"`
AgentConfig SigNozAgentConfig `json:"agent_config"`
}
@@ -110,7 +102,7 @@ type GenerateConnectionUrlResponse struct {
}
func (c *Controller) GenerateConnectionUrl(
ctx context.Context, cloudProvider string, req GenerateConnectionUrlRequest,
ctx context.Context, orgId string, cloudProvider string, req GenerateConnectionUrlRequest,
) (*GenerateConnectionUrlResponse, *model.ApiError) {
// Account connection with a simple connection URL may not be available for all providers.
if cloudProvider != "aws" {
@@ -118,7 +110,7 @@ func (c *Controller) GenerateConnectionUrl(
}
account, apiErr := c.accountsRepo.upsert(
ctx, cloudProvider, req.AccountId, &req.AccountConfig, nil, nil, nil,
ctx, orgId, cloudProvider, req.AccountId, &req.AccountConfig, nil, nil, nil,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
@@ -136,7 +128,7 @@ func (c *Controller) GenerateConnectionUrl(
"param_SigNozIntegrationAgentVersion": agentVersion,
"param_SigNozApiUrl": req.AgentConfig.SigNozAPIUrl,
"param_SigNozApiKey": req.AgentConfig.SigNozAPIKey,
"param_SigNozAccountId": account.Id,
"param_SigNozAccountId": account.ID.StringValue(),
"param_IngestionUrl": req.AgentConfig.IngestionUrl,
"param_IngestionKey": req.AgentConfig.IngestionKey,
"stackName": "signoz-integration",
@@ -149,19 +141,19 @@ func (c *Controller) GenerateConnectionUrl(
}
return &GenerateConnectionUrlResponse{
AccountId: account.Id,
AccountId: account.ID.StringValue(),
ConnectionUrl: connectionUrl,
}, nil
}
type AccountStatusResponse struct {
Id string `json:"id"`
CloudAccountId *string `json:"cloud_account_id,omitempty"`
Status AccountStatus `json:"status"`
Id string `json:"id"`
CloudAccountId *string `json:"cloud_account_id,omitempty"`
Status types.AccountStatus `json:"status"`
}
func (c *Controller) GetAccountStatus(
ctx context.Context, cloudProvider string, accountId string,
ctx context.Context, orgId string, cloudProvider string, accountId string,
) (
*AccountStatusResponse, *model.ApiError,
) {
@@ -169,23 +161,23 @@ func (c *Controller) GetAccountStatus(
return nil, apiErr
}
account, apiErr := c.accountsRepo.get(ctx, cloudProvider, accountId)
account, apiErr := c.accountsRepo.get(ctx, orgId, cloudProvider, accountId)
if apiErr != nil {
return nil, apiErr
}
resp := AccountStatusResponse{
Id: account.Id,
CloudAccountId: account.CloudAccountId,
Status: account.status(),
Id: account.ID.StringValue(),
CloudAccountId: account.AccountID,
Status: account.Status(),
}
return &resp, nil
}
type AgentCheckInRequest struct {
AccountId string `json:"account_id"`
CloudAccountId string `json:"cloud_account_id"`
ID string `json:"account_id"`
AccountID string `json:"cloud_account_id"`
// Arbitrary cloud specific Agent data
Data map[string]any `json:"data,omitempty"`
}
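// Note: the Go field names change here (AccountId -> ID, CloudAccountId -> AccountID)
// but the json tags stay "account_id" and "cloud_account_id", so the agent's
// check-in payload keeps its shape, e.g. (illustrative values):
//
//	{
//	  "account_id": "5e91cba8-...-uuid",
//	  "cloud_account_id": "546311234",
//	  "data": {"version": "..."}
//	}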
@@ -201,46 +193,46 @@ type AgentCheckInResponse struct {
type IntegrationConfigForAgent struct {
EnabledRegions []string `json:"enabled_regions"`
TelemetryCollectionStrategy *CompiledCollectionStrategy `json:"telemetry,omitempty"`
TelemetryCollectionStrategy *CloudTelemetryCollectionStrategy `json:"telemetry,omitempty"`
}
func (c *Controller) CheckInAsAgent(
ctx context.Context, cloudProvider string, req AgentCheckInRequest,
ctx context.Context, orgId string, cloudProvider string, req AgentCheckInRequest,
) (*AgentCheckInResponse, *model.ApiError) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
existingAccount, apiErr := c.accountsRepo.get(ctx, cloudProvider, req.AccountId)
if existingAccount != nil && existingAccount.CloudAccountId != nil && *existingAccount.CloudAccountId != req.CloudAccountId {
existingAccount, apiErr := c.accountsRepo.get(ctx, orgId, cloudProvider, req.ID)
if existingAccount != nil && existingAccount.AccountID != nil && *existingAccount.AccountID != req.AccountID {
return nil, model.BadRequest(fmt.Errorf(
"can't check in with new %s account id %s for account %s with existing %s id %s",
cloudProvider, req.CloudAccountId, existingAccount.Id, cloudProvider, *existingAccount.CloudAccountId,
cloudProvider, req.AccountID, existingAccount.ID.StringValue(), cloudProvider, *existingAccount.AccountID,
))
}
existingAccount, apiErr = c.accountsRepo.getConnectedCloudAccount(ctx, cloudProvider, req.CloudAccountId)
if existingAccount != nil && existingAccount.Id != req.AccountId {
existingAccount, apiErr = c.accountsRepo.getConnectedCloudAccount(ctx, orgId, cloudProvider, req.AccountID)
if existingAccount != nil && existingAccount.ID.StringValue() != req.ID {
return nil, model.BadRequest(fmt.Errorf(
"can't check in to %s account %s with id %s. already connected with id %s",
cloudProvider, req.CloudAccountId, req.AccountId, existingAccount.Id,
cloudProvider, req.AccountID, req.ID, existingAccount.ID.StringValue(),
))
}
agentReport := AgentReport{
agentReport := types.AgentReport{
TimestampMillis: time.Now().UnixMilli(),
Data: req.Data,
}
account, apiErr := c.accountsRepo.upsert(
ctx, cloudProvider, &req.AccountId, nil, &req.CloudAccountId, &agentReport, nil,
ctx, orgId, cloudProvider, &req.ID, nil, &req.AccountID, &agentReport, nil,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
}
// prepare and return integration config to be consumed by agent
compliedStrategy, err := NewCompiledCollectionStrategy(cloudProvider)
telemetryCollectionStrategy, err := NewCloudTelemetryCollectionStrategy(cloudProvider)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't init telemetry collection strategy: %w", err,
@@ -249,20 +241,24 @@ func (c *Controller) CheckInAsAgent(
agentConfig := IntegrationConfigForAgent{
EnabledRegions: []string{},
TelemetryCollectionStrategy: compliedStrategy,
TelemetryCollectionStrategy: telemetryCollectionStrategy,
}
if account.Config != nil && account.Config.EnabledRegions != nil {
agentConfig.EnabledRegions = account.Config.EnabledRegions
}
services, apiErr := services.Map(cloudProvider)
services, apiErr := listCloudProviderServices(cloudProvider)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't list cloud services")
}
svcDetailsById := map[string]*CloudServiceDetails{}
for _, svcDetails := range services {
svcDetailsById[svcDetails.Id] = &svcDetails
}
svcConfigs, apiErr := c.serviceConfigRepo.getAllForAccount(
ctx, cloudProvider, *account.CloudAccountId,
ctx, orgId, account.ID.StringValue(),
)
if apiErr != nil {
return nil, model.WrapApiError(
@@ -275,68 +271,75 @@ func (c *Controller) CheckInAsAgent(
slices.Sort(configuredSvcIds)
for _, svcId := range configuredSvcIds {
definition, ok := services[svcId]
if !ok {
continue
}
config := svcConfigs[svcId]
svcDetails := svcDetailsById[svcId]
svcConfig := svcConfigs[svcId]
err := AddServiceStrategy(compliedStrategy, definition.Strategy, config)
if err != nil {
return nil, model.InternalError(
fmt.Errorf("couldn't add service telemetry collection strategy: %s", err))
if svcDetails != nil {
metricsEnabled := svcConfig.Metrics != nil && svcConfig.Metrics.Enabled
logsEnabled := svcConfig.Logs != nil && svcConfig.Logs.Enabled
if logsEnabled || metricsEnabled {
err := agentConfig.TelemetryCollectionStrategy.AddServiceStrategy(
svcDetails.TelemetryCollectionStrategy, logsEnabled, metricsEnabled,
)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't add service telemetry collection strategy: %w", err,
))
}
}
}
}
return &AgentCheckInResponse{
AccountId: account.Id,
CloudAccountId: *account.CloudAccountId,
AccountId: account.ID.StringValue(),
CloudAccountId: *account.AccountID,
RemovedAt: account.RemovedAt,
IntegrationConfig: agentConfig,
}, nil
}
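// In short: a check-in upserts the account with a fresh AgentReport, then builds
// the config returned to the agent: enabled regions from the account config plus,
// for each configured service with logs or metrics enabled, that service's
// telemetry collection strategy folded into a single
// CloudTelemetryCollectionStrategy via AddServiceStrategy.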
type UpdateAccountConfigRequest struct {
Config AccountConfig `json:"config"`
Config types.AccountConfig `json:"config"`
}
func (c *Controller) UpdateAccountConfig(
ctx context.Context,
orgId string,
cloudProvider string,
accountId string,
req UpdateAccountConfigRequest,
) (*Account, *model.ApiError) {
) (*types.Account, *model.ApiError) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
accountRecord, apiErr := c.accountsRepo.upsert(
ctx, cloudProvider, &accountId, &req.Config, nil, nil, nil,
ctx, orgId, cloudProvider, &accountId, &req.Config, nil, nil, nil,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
}
account := accountRecord.account()
account := accountRecord.Account()
return &account, nil
}
func (c *Controller) DisconnectAccount(
ctx context.Context, cloudProvider string, accountId string,
) (*AccountRecord, *model.ApiError) {
ctx context.Context, orgId string, cloudProvider string, accountId string,
) (*types.CloudIntegration, *model.ApiError) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
account, apiErr := c.accountsRepo.get(ctx, cloudProvider, accountId)
account, apiErr := c.accountsRepo.get(ctx, orgId, cloudProvider, accountId)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't disconnect account")
}
tsNow := time.Now()
account, apiErr = c.accountsRepo.upsert(
ctx, cloudProvider, &accountId, nil, nil, nil, &tsNow,
ctx, orgId, cloudProvider, &accountId, nil, nil, nil, &tsNow,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't disconnect account")
@@ -346,27 +349,35 @@ func (c *Controller) DisconnectAccount(
}
type ListServicesResponse struct {
Services []ServiceSummary `json:"services"`
Services []CloudServiceSummary `json:"services"`
}
func (c *Controller) ListServices(
ctx context.Context,
orgID string,
cloudProvider string,
cloudAccountId *string,
) (*ListServicesResponse, *model.ApiError) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
definitions, apiErr := services.List(cloudProvider)
services, apiErr := listCloudProviderServices(cloudProvider)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't list cloud services")
}
svcConfigs := map[string]*ServiceConfig{}
svcConfigs := map[string]*types.CloudServiceConfig{}
if cloudAccountId != nil {
activeAccount, apiErr := c.accountsRepo.getConnectedCloudAccount(
ctx, orgID, cloudProvider, *cloudAccountId,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't get active account")
}
svcConfigs, apiErr = c.serviceConfigRepo.getAllForAccount(
ctx, cloudProvider, *cloudAccountId,
ctx, orgID, activeAccount.ID.StringValue(),
)
if apiErr != nil {
return nil, model.WrapApiError(
@@ -375,11 +386,9 @@ func (c *Controller) ListServices(
}
}
summaries := []ServiceSummary{}
for _, def := range definitions {
summary := ServiceSummary{
Metadata: def.Metadata,
}
summaries := []CloudServiceSummary{}
for _, s := range services {
summary := s.CloudServiceSummary
summary.Config = svcConfigs[summary.Id]
summaries = append(summaries, summary)
@@ -392,88 +401,102 @@ func (c *Controller) ListServices(
func (c *Controller) GetServiceDetails(
ctx context.Context,
orgID string,
cloudProvider string,
serviceId string,
cloudAccountId *string,
) (*ServiceDetails, *model.ApiError) {
) (*CloudServiceDetails, *model.ApiError) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
definition, apiErr := services.GetServiceDefinition(cloudProvider, serviceId)
service, apiErr := getCloudProviderService(cloudProvider, serviceId)
if apiErr != nil {
return nil, apiErr
}
details := ServiceDetails{
Definition: *definition,
}
if cloudAccountId != nil {
activeAccount, apiErr := c.accountsRepo.getConnectedCloudAccount(
ctx, orgID, cloudProvider, *cloudAccountId,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't get active account")
}
config, apiErr := c.serviceConfigRepo.get(
ctx, cloudProvider, *cloudAccountId, serviceId,
ctx, orgID, activeAccount.ID.StringValue(), serviceId,
)
if apiErr != nil && apiErr.Type() != model.ErrorNotFound {
return nil, model.WrapApiError(apiErr, "couldn't fetch service config")
}
if config != nil {
details.Config = config
service.Config = config
enabled := false
if config.Metrics != nil && config.Metrics.Enabled {
// add links to service dashboards, making them clickable.
for i, d := range details.Assets.Dashboards {
dashboardUuid := c.dashboardUuid(
cloudProvider, serviceId, d.Id,
)
details.Assets.Dashboards[i].Url = fmt.Sprintf(
enabled = true
}
// add links to service dashboards, making them clickable.
for i, d := range service.Assets.Dashboards {
dashboardUuid := c.dashboardUuid(
cloudProvider, serviceId, d.Id,
)
if enabled {
service.Assets.Dashboards[i].Url = fmt.Sprintf(
"/dashboard/%s", dashboardUuid,
)
} else {
service.Assets.Dashboards[i].Url = ""
}
}
}
}
return &details, nil
return service, nil
}
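// Net effect: a service's dashboards only carry a clickable /dashboard/<uuid>
// URL when metrics collection is enabled in its config for the active account;
// otherwise the Url field is left empty.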
type UpdateServiceConfigRequest struct {
CloudAccountId string `json:"cloud_account_id"`
Config ServiceConfig `json:"config"`
CloudAccountId string `json:"cloud_account_id"`
Config types.CloudServiceConfig `json:"config"`
}
type UpdateServiceConfigResponse struct {
Id string `json:"id"`
Config ServiceConfig `json:"config"`
Id string `json:"id"`
Config types.CloudServiceConfig `json:"config"`
}
func (c *Controller) UpdateServiceConfig(
ctx context.Context,
orgID string,
cloudProvider string,
serviceId string,
req UpdateServiceConfigRequest,
) (*UpdateServiceConfigResponse, *model.ApiError) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
// can only update config for a connected cloud account id
_, apiErr := c.accountsRepo.getConnectedCloudAccount(
ctx, cloudProvider, req.CloudAccountId,
ctx, orgID, cloudProvider, req.CloudAccountId,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't find connected cloud account")
}
// can only update config for a valid service.
_, apiErr = services.GetServiceDefinition(cloudProvider, serviceId)
_, apiErr = getCloudProviderService(cloudProvider, serviceId)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "unsupported service")
}
updatedConfig, apiErr := c.serviceConfigRepo.upsert(
ctx, cloudProvider, req.CloudAccountId, serviceId, req.Config,
ctx, orgID, cloudProvider, req.CloudAccountId, serviceId, req.Config,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't update service config")
@@ -487,13 +510,13 @@ func (c *Controller) UpdateServiceConfig(
// All dashboards that are available based on cloud integrations configuration
// across all cloud providers
func (c *Controller) AvailableDashboards(ctx context.Context) (
func (c *Controller) AvailableDashboards(ctx context.Context, orgId string) (
[]types.Dashboard, *model.ApiError,
) {
allDashboards := []types.Dashboard{}
for _, provider := range []string{"aws"} {
providerDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, provider)
providerDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, orgId, provider)
if apiErr != nil {
return nil, model.WrapApiError(
apiErr, fmt.Sprintf("couldn't get available dashboards for %s", provider),
@@ -507,10 +530,10 @@ func (c *Controller) AvailableDashboards(ctx context.Context) (
}
func (c *Controller) AvailableDashboardsForCloudProvider(
ctx context.Context, cloudProvider string,
ctx context.Context, orgID string, cloudProvider string,
) ([]types.Dashboard, *model.ApiError) {
accountRecords, apiErr := c.accountsRepo.listConnected(ctx, cloudProvider)
accountRecords, apiErr := c.accountsRepo.listConnected(ctx, orgID, cloudProvider)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't list connected cloud accounts")
}
@@ -519,9 +542,9 @@ func (c *Controller) AvailableDashboardsForCloudProvider(
servicesWithAvailableMetrics := map[string]*time.Time{}
for _, ar := range accountRecords {
if ar.CloudAccountId != nil {
if ar.AccountID != nil {
configsBySvcId, apiErr := c.serviceConfigRepo.getAllForAccount(
ctx, cloudProvider, *ar.CloudAccountId,
ctx, orgID, ar.ID.StringValue(),
)
if apiErr != nil {
return nil, apiErr
@@ -535,7 +558,7 @@ func (c *Controller) AvailableDashboardsForCloudProvider(
}
}
allServices, apiErr := services.List(cloudProvider)
allServices, apiErr := listCloudProviderServices(cloudProvider)
if apiErr != nil {
return nil, apiErr
}
@@ -569,6 +592,7 @@ func (c *Controller) AvailableDashboardsForCloudProvider(
}
func (c *Controller) GetDashboardById(
ctx context.Context,
orgId string,
dashboardUuid string,
) (*types.Dashboard, *model.ApiError) {
cloudProvider, _, _, apiErr := c.parseDashboardUuid(dashboardUuid)
@@ -576,7 +600,7 @@ func (c *Controller) GetDashboardById(
return nil, apiErr
}
allDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, cloudProvider)
allDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, orgId, cloudProvider)
if apiErr != nil {
return nil, model.WrapApiError(
apiErr, fmt.Sprintf("couldn't list available dashboards"),

View File

@@ -4,23 +4,30 @@ import (
"context"
"testing"
"github.com/SigNoz/signoz/pkg/query-service/auth"
"github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/dao"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/types"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
)
func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
require := require.New(t)
sqlStore, _ := utils.NewTestSqliteDB(t)
sqlStore := utils.NewQueryServiceDBForTests(t)
controller, err := NewController(sqlStore)
require.NoError(err)
user, apiErr := createTestUser()
require.Nil(apiErr)
// should be able to generate connection url for
// same account id again with updated config
testAccountConfig1 := AccountConfig{EnabledRegions: []string{"us-east-1", "us-west-1"}}
testAccountConfig1 := types.AccountConfig{EnabledRegions: []string{"us-east-1", "us-west-1"}}
resp1, apiErr := controller.GenerateConnectionUrl(
context.TODO(), "aws", GenerateConnectionUrlRequest{
context.TODO(), user.OrgID, "aws", GenerateConnectionUrlRequest{
AccountConfig: testAccountConfig1,
AgentConfig: SigNozAgentConfig{Region: "us-east-2"},
},
@@ -31,14 +38,14 @@ func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
testAccountId := resp1.AccountId
account, apiErr := controller.accountsRepo.get(
context.TODO(), "aws", testAccountId,
context.TODO(), user.OrgID, "aws", testAccountId,
)
require.Nil(apiErr)
require.Equal(testAccountConfig1, *account.Config)
testAccountConfig2 := AccountConfig{EnabledRegions: []string{"us-east-2", "us-west-2"}}
testAccountConfig2 := types.AccountConfig{EnabledRegions: []string{"us-east-2", "us-west-2"}}
resp2, apiErr := controller.GenerateConnectionUrl(
context.TODO(), "aws", GenerateConnectionUrlRequest{
context.TODO(), user.OrgID, "aws", GenerateConnectionUrlRequest{
AccountId: &testAccountId,
AccountConfig: testAccountConfig2,
AgentConfig: SigNozAgentConfig{Region: "us-east-2"},
@@ -48,7 +55,7 @@ func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
require.Equal(testAccountId, resp2.AccountId)
account, apiErr = controller.accountsRepo.get(
context.TODO(), "aws", testAccountId,
context.TODO(), user.OrgID, "aws", testAccountId,
)
require.Nil(apiErr)
require.Equal(testAccountConfig2, *account.Config)
@@ -56,18 +63,21 @@ func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
func TestAgentCheckIns(t *testing.T) {
require := require.New(t)
sqlStore, _ := utils.NewTestSqliteDB(t)
sqlStore := utils.NewQueryServiceDBForTests(t)
controller, err := NewController(sqlStore)
require.NoError(err)
user, apiErr := createTestUser()
require.Nil(apiErr)
// An agent should be able to check in from a cloud account even
// if no connection url was requested (no account with agent's account id exists)
testAccountId1 := uuid.NewString()
testCloudAccountId1 := "546311234"
resp1, apiErr := controller.CheckInAsAgent(
context.TODO(), "aws", AgentCheckInRequest{
AccountId: testAccountId1,
CloudAccountId: testCloudAccountId1,
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
ID: testAccountId1,
AccountID: testCloudAccountId1,
},
)
require.Nil(apiErr)
@@ -78,9 +88,9 @@ func TestAgentCheckIns(t *testing.T) {
// cloud account id for the same account.
testCloudAccountId2 := "99999999"
_, apiErr = controller.CheckInAsAgent(
context.TODO(), "aws", AgentCheckInRequest{
AccountId: testAccountId1,
CloudAccountId: testCloudAccountId2,
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
ID: testAccountId1,
AccountID: testCloudAccountId2,
},
)
require.NotNil(apiErr)
@@ -90,18 +100,18 @@ func TestAgentCheckIns(t *testing.T) {
// i.e. there can't be 2 connected account records for the same cloud account id
// at any point in time.
existingConnected, apiErr := controller.accountsRepo.getConnectedCloudAccount(
context.TODO(), "aws", testCloudAccountId1,
context.TODO(), user.OrgID, "aws", testCloudAccountId1,
)
require.Nil(apiErr)
require.NotNil(existingConnected)
require.Equal(testCloudAccountId1, *existingConnected.CloudAccountId)
require.Equal(testCloudAccountId1, *existingConnected.AccountID)
require.Nil(existingConnected.RemovedAt)
testAccountId2 := uuid.NewString()
_, apiErr = controller.CheckInAsAgent(
context.TODO(), "aws", AgentCheckInRequest{
AccountId: testAccountId2,
CloudAccountId: testCloudAccountId1,
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
ID: testAccountId2,
AccountID: testCloudAccountId1,
},
)
require.NotNil(apiErr)
@@ -109,29 +119,29 @@ func TestAgentCheckIns(t *testing.T) {
// After disconnecting the existing account record, the agent should be able to
// connect again for that cloud account id
_, apiErr = controller.DisconnectAccount(
context.TODO(), "aws", testAccountId1,
context.TODO(), user.OrgID, "aws", testAccountId1,
)
existingConnected, apiErr = controller.accountsRepo.getConnectedCloudAccount(
context.TODO(), "aws", testCloudAccountId1,
context.TODO(), user.OrgID, "aws", testCloudAccountId1,
)
require.Nil(existingConnected)
require.NotNil(apiErr)
require.Equal(model.ErrorNotFound, apiErr.Type())
_, apiErr = controller.CheckInAsAgent(
context.TODO(), "aws", AgentCheckInRequest{
AccountId: testAccountId2,
CloudAccountId: testCloudAccountId1,
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
ID: testAccountId2,
AccountID: testCloudAccountId1,
},
)
require.Nil(apiErr)
// should be able to keep checking in
_, apiErr = controller.CheckInAsAgent(
context.TODO(), "aws", AgentCheckInRequest{
AccountId: testAccountId2,
CloudAccountId: testCloudAccountId1,
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
ID: testAccountId2,
AccountID: testCloudAccountId1,
},
)
require.Nil(apiErr)
@@ -139,13 +149,16 @@ func TestAgentCheckIns(t *testing.T) {
func TestCantDisconnectNonExistentAccount(t *testing.T) {
require := require.New(t)
sqlStore, _ := utils.NewTestSqliteDB(t)
sqlStore := utils.NewQueryServiceDBForTests(t)
controller, err := NewController(sqlStore)
require.NoError(err)
user, apiErr := createTestUser()
require.Nil(apiErr)
// Attempting to disconnect a non-existent account should return an error
account, apiErr := controller.DisconnectAccount(
context.TODO(), "aws", uuid.NewString(),
context.TODO(), user.OrgID, "aws", uuid.NewString(),
)
require.NotNil(apiErr)
require.Equal(model.ErrorNotFound, apiErr.Type())
@@ -154,15 +167,23 @@ func TestCantDisconnectNonExistentAccount(t *testing.T) {
func TestConfigureService(t *testing.T) {
require := require.New(t)
sqlStore, _ := utils.NewTestSqliteDB(t)
sqlStore := utils.NewQueryServiceDBForTests(t)
controller, err := NewController(sqlStore)
require.NoError(err)
user, apiErr := createTestUser()
require.Nil(apiErr)
// create a connected account
testCloudAccountId := "546311234"
testConnectedAccount := makeTestConnectedAccount(t, user.OrgID, controller, testCloudAccountId)
require.Nil(testConnectedAccount.RemovedAt)
require.NotEmpty(testConnectedAccount.AccountID)
require.Equal(testCloudAccountId, *testConnectedAccount.AccountID)
// should start out without any service config
svcListResp, apiErr := controller.ListServices(
context.TODO(), "aws", &testCloudAccountId,
context.TODO(), user.OrgID, "aws", &testCloudAccountId,
)
require.Nil(apiErr)
@@ -170,25 +191,20 @@ func TestConfigureService(t *testing.T) {
require.Nil(svcListResp.Services[0].Config)
svcDetails, apiErr := controller.GetServiceDetails(
context.TODO(), "aws", testSvcId, &testCloudAccountId,
context.TODO(), user.OrgID, "aws", testSvcId, &testCloudAccountId,
)
require.Nil(apiErr)
require.Equal(testSvcId, svcDetails.Id)
require.Nil(svcDetails.Config)
// should be able to configure a service for a connected account
testConnectedAccount := makeTestConnectedAccount(t, controller, testCloudAccountId)
require.Nil(testConnectedAccount.RemovedAt)
require.NotNil(testConnectedAccount.CloudAccountId)
require.Equal(testCloudAccountId, *testConnectedAccount.CloudAccountId)
testSvcConfig := ServiceConfig{
Metrics: &MetricsConfig{
testSvcConfig := types.CloudServiceConfig{
Metrics: &types.CloudServiceMetricsConfig{
Enabled: true,
},
}
updateSvcConfigResp, apiErr := controller.UpdateServiceConfig(
context.TODO(), "aws", testSvcId, UpdateServiceConfigRequest{
context.TODO(), user.OrgID, "aws", testSvcId, UpdateServiceConfigRequest{
CloudAccountId: testCloudAccountId,
Config: testSvcConfig,
},
@@ -198,14 +214,14 @@ func TestConfigureService(t *testing.T) {
require.Equal(testSvcConfig, updateSvcConfigResp.Config)
svcDetails, apiErr = controller.GetServiceDetails(
context.TODO(), "aws", testSvcId, &testCloudAccountId,
context.TODO(), user.OrgID, "aws", testSvcId, &testCloudAccountId,
)
require.Nil(apiErr)
require.Equal(testSvcId, svcDetails.Id)
require.Equal(testSvcConfig, *svcDetails.Config)
svcListResp, apiErr = controller.ListServices(
context.TODO(), "aws", &testCloudAccountId,
context.TODO(), user.OrgID, "aws", &testCloudAccountId,
)
require.Nil(apiErr)
for _, svc := range svcListResp.Services {
@@ -216,12 +232,12 @@ func TestConfigureService(t *testing.T) {
// should not be able to configure service after cloud account has been disconnected
_, apiErr = controller.DisconnectAccount(
context.TODO(), "aws", testConnectedAccount.Id,
context.TODO(), user.OrgID, "aws", testConnectedAccount.ID.StringValue(),
)
require.Nil(apiErr)
_, apiErr = controller.UpdateServiceConfig(
context.TODO(), "aws", testSvcId,
context.TODO(), user.OrgID, "aws", testSvcId,
UpdateServiceConfigRequest{
CloudAccountId: testCloudAccountId,
Config: testSvcConfig,
@@ -231,7 +247,7 @@ func TestConfigureService(t *testing.T) {
// should not be able to configure a service for a cloud account id that is not connected yet
_, apiErr = controller.UpdateServiceConfig(
context.TODO(), "aws", testSvcId,
context.TODO(), user.OrgID, "aws", testSvcId,
UpdateServiceConfigRequest{
CloudAccountId: "9999999999",
Config: testSvcConfig,
@@ -241,7 +257,7 @@ func TestConfigureService(t *testing.T) {
// should not be able to set config for an unsupported service
_, apiErr = controller.UpdateServiceConfig(
context.TODO(), "aws", "bad-service", UpdateServiceConfigRequest{
context.TODO(), user.OrgID, "aws", "bad-service", UpdateServiceConfigRequest{
CloudAccountId: testCloudAccountId,
Config: testSvcConfig,
},
@@ -250,22 +266,54 @@ func TestConfigureService(t *testing.T) {
}
func makeTestConnectedAccount(t *testing.T, controller *Controller, cloudAccountId string) *AccountRecord {
func makeTestConnectedAccount(t *testing.T, orgId string, controller *Controller, cloudAccountId string) *types.CloudIntegration {
require := require.New(t)
// a check-in from the SigNoz agent creates or updates a connected account.
testAccountId := uuid.NewString()
resp, apiErr := controller.CheckInAsAgent(
context.TODO(), "aws", AgentCheckInRequest{
AccountId: testAccountId,
CloudAccountId: cloudAccountId,
context.TODO(), orgId, "aws", AgentCheckInRequest{
ID: testAccountId,
AccountID: cloudAccountId,
},
)
require.Nil(apiErr)
require.Equal(testAccountId, resp.AccountId)
require.Equal(cloudAccountId, resp.CloudAccountId)
acc, err := controller.accountsRepo.get(context.TODO(), "aws", resp.AccountId)
acc, err := controller.accountsRepo.get(context.TODO(), orgId, "aws", resp.AccountId)
require.Nil(err)
return acc
}
func createTestUser() (*types.User, *model.ApiError) {
// Create a test user for auth
ctx := context.Background()
org, apiErr := dao.DB().CreateOrg(ctx, &types.Organization{
Name: "test",
})
if apiErr != nil {
return nil, apiErr
}
group, apiErr := dao.DB().GetGroupByName(ctx, constants.AdminGroup)
if apiErr != nil {
return nil, apiErr
}
auth.InitAuthCache(ctx)
userId := uuid.NewString()
return dao.DB().CreateUser(
ctx,
&types.User{
ID: userId,
Name: "test",
Email: userId[:8] + "test@test.com",
Password: "test",
OrgID: org.ID,
GroupID: group.ID,
},
true,
)
}

View File

@@ -1,176 +1,74 @@
package cloudintegrations
import (
"database/sql/driver"
"encoding/json"
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
"github.com/SigNoz/signoz/pkg/types"
)
type ServiceSummary struct {
services.Metadata
type CloudServiceSummary struct {
Id string `json:"id"`
Title string `json:"title"`
Icon string `json:"icon"`
Config *ServiceConfig `json:"config"`
// Present only if the service has been configured in the
// context of a cloud provider account.
Config *types.CloudServiceConfig `json:"config,omitempty"`
}
type ServiceDetails struct {
services.Definition
type CloudServiceDetails struct {
CloudServiceSummary
Config *ServiceConfig `json:"config"`
ConnectionStatus *ServiceConnectionStatus `json:"status,omitempty"`
Overview string `json:"overview"` // markdown
Assets CloudServiceAssets `json:"assets"`
SupportedSignals SupportedSignals `json:"supported_signals"`
DataCollected DataCollectedForService `json:"data_collected"`
ConnectionStatus *CloudServiceConnectionStatus `json:"status,omitempty"`
TelemetryCollectionStrategy *CloudTelemetryCollectionStrategy `json:"telemetry_collection_strategy"`
}
// Represents a cloud provider account for cloud integrations
type AccountRecord struct {
CloudProvider string `json:"cloud_provider" db:"cloud_provider"`
Id string `json:"id" db:"id"`
Config *AccountConfig `json:"config" db:"config_json"`
CloudAccountId *string `json:"cloud_account_id" db:"cloud_account_id"`
LastAgentReport *AgentReport `json:"last_agent_report" db:"last_agent_report_json"`
CreatedAt time.Time `json:"created_at" db:"created_at"`
RemovedAt *time.Time `json:"removed_at" db:"removed_at"`
type CloudServiceAssets struct {
Dashboards []CloudServiceDashboard `json:"dashboards"`
}
type AccountConfig struct {
EnabledRegions []string `json:"regions"`
type CloudServiceDashboard struct {
Id string `json:"id"`
Url string `json:"url"`
Title string `json:"title"`
Description string `json:"description"`
Image string `json:"image"`
Definition *types.DashboardData `json:"definition,omitempty"`
}
func DefaultAccountConfig() AccountConfig {
return AccountConfig{
EnabledRegions: []string{},
}
type SupportedSignals struct {
Logs bool `json:"logs"`
Metrics bool `json:"metrics"`
}
// For serializing from db
func (c *AccountConfig) Scan(src any) error {
data, ok := src.([]byte)
if !ok {
return fmt.Errorf("tried to scan from %T instead of bytes", src)
}
return json.Unmarshal(data, &c)
type DataCollectedForService struct {
Logs []CollectedLogAttribute `json:"logs"`
Metrics []CollectedMetric `json:"metrics"`
}
// For serializing to db
func (c *AccountConfig) Value() (driver.Value, error) {
if c == nil {
return nil, nil
}
serialized, err := json.Marshal(c)
if err != nil {
return nil, fmt.Errorf(
"couldn't serialize cloud account config to JSON: %w", err,
)
}
return serialized, nil
type CollectedLogAttribute struct {
Name string `json:"name"`
Path string `json:"path"`
Type string `json:"type"`
}
type AgentReport struct {
TimestampMillis int64 `json:"timestamp_millis"`
Data map[string]any `json:"data"`
type CollectedMetric struct {
Name string `json:"name"`
Type string `json:"type"`
Unit string `json:"unit"`
Description string `json:"description"`
}
// For serializing from db
func (r *AgentReport) Scan(src any) error {
data, ok := src.([]byte)
if !ok {
return fmt.Errorf("tried to scan from %T instead of bytes", src)
}
return json.Unmarshal(data, &r)
}
// For serializing to db
func (r *AgentReport) Value() (driver.Value, error) {
if r == nil {
return nil, nil
}
serialized, err := json.Marshal(r)
if err != nil {
return nil, fmt.Errorf(
"couldn't serialize agent report to JSON: %w", err,
)
}
return serialized, nil
}
type AccountStatus struct {
Integration AccountIntegrationStatus `json:"integration"`
}
type AccountIntegrationStatus struct {
LastHeartbeatTsMillis *int64 `json:"last_heartbeat_ts_ms"`
}
func (a *AccountRecord) status() AccountStatus {
status := AccountStatus{}
if a.LastAgentReport != nil {
lastHeartbeat := a.LastAgentReport.TimestampMillis
status.Integration.LastHeartbeatTsMillis = &lastHeartbeat
}
return status
}
func (a *AccountRecord) account() Account {
ca := Account{Id: a.Id, Status: a.status()}
if a.CloudAccountId != nil {
ca.CloudAccountId = *a.CloudAccountId
}
if a.Config != nil {
ca.Config = *a.Config
} else {
ca.Config = DefaultAccountConfig()
}
return ca
}
type ServiceConfig struct {
Logs *LogsConfig `json:"logs,omitempty"`
Metrics *MetricsConfig `json:"metrics,omitempty"`
}
// Serialization from db
func (c *ServiceConfig) Scan(src any) error {
data, ok := src.([]byte)
if !ok {
return fmt.Errorf("tried to scan from %T instead of bytes", src)
}
return json.Unmarshal(data, &c)
}
// Serializing to db
func (c *ServiceConfig) Value() (driver.Value, error) {
if c == nil {
return nil, nil
}
serialized, err := json.Marshal(c)
if err != nil {
return nil, fmt.Errorf(
"couldn't serialize cloud service config to JSON: %w", err,
)
}
return serialized, nil
}
type LogsConfig struct {
Enabled bool `json:"enabled"`
S3Buckets map[string][]string `json:"s3_buckets,omitempty"`
}
type MetricsConfig struct {
Enabled bool `json:"enabled"`
}
type ServiceConnectionStatus struct {
type CloudServiceConnectionStatus struct {
Logs *SignalConnectionStatus `json:"logs"`
Metrics *SignalConnectionStatus `json:"metrics"`
}
@@ -180,14 +78,23 @@ type SignalConnectionStatus struct {
LastReceivedFrom string `json:"last_received_from"` // resource identifier
}
type CompiledCollectionStrategy = services.CollectionStrategy
type CloudTelemetryCollectionStrategy struct {
Provider string `json:"provider"`
func NewCompiledCollectionStrategy(provider string) (*CompiledCollectionStrategy, error) {
AWSMetrics *AWSMetricsCollectionStrategy `json:"aws_metrics,omitempty"`
AWSLogs *AWSLogsCollectionStrategy `json:"aws_logs,omitempty"`
}
func NewCloudTelemetryCollectionStrategy(provider string) (*CloudTelemetryCollectionStrategy, error) {
if provider == "aws" {
return &CompiledCollectionStrategy{
Provider: "aws",
AWSMetrics: &services.AWSMetricsStrategy{},
AWSLogs: &services.AWSLogsStrategy{},
return &CloudTelemetryCollectionStrategy{
Provider: "aws",
AWSMetrics: &AWSMetricsCollectionStrategy{
CloudwatchMetricsStreamFilters: []CloudwatchMetricStreamFilter{},
},
AWSLogs: &AWSLogsCollectionStrategy{
CloudwatchLogsSubscriptions: []CloudwatchLogsSubscriptionConfig{},
},
}, nil
}
@@ -195,27 +102,24 @@ func NewCompiledCollectionStrategy(provider string) (*CompiledCollectionStrategy
}
// Helper for accumulating strategies for enabled services.
func AddServiceStrategy(cs *CompiledCollectionStrategy,
definitionStrat *services.CollectionStrategy, config *ServiceConfig) error {
if definitionStrat.Provider != cs.Provider {
func (cs *CloudTelemetryCollectionStrategy) AddServiceStrategy(
svcStrategy *CloudTelemetryCollectionStrategy,
logsEnabled bool,
metricsEnabled bool,
) error {
if svcStrategy.Provider != cs.Provider {
return fmt.Errorf(
"can't add %s service strategy to strategy for %s",
definitionStrat.Provider, cs.Provider,
svcStrategy.Provider, cs.Provider,
)
}
if cs.Provider == "aws" {
if config.Logs != nil && config.Logs.Enabled && definitionStrat.AWSLogs != nil {
cs.AWSLogs.Subscriptions = append(
cs.AWSLogs.Subscriptions,
definitionStrat.AWSLogs.Subscriptions...,
)
if logsEnabled {
cs.AWSLogs.AddServiceStrategy(svcStrategy.AWSLogs)
}
if config.Metrics != nil && config.Metrics.Enabled && definitionStrat.AWSMetrics != nil {
cs.AWSMetrics.StreamFilters = append(
cs.AWSMetrics.StreamFilters,
definitionStrat.AWSMetrics.StreamFilters...,
)
if metricsEnabled {
cs.AWSMetrics.AddServiceStrategy(svcStrategy.AWSMetrics)
}
return nil
}
@@ -223,3 +127,57 @@ func AddServiceStrategy(cs *CompiledCollectionStrategy,
return fmt.Errorf("unsupported cloud provider: %s", cs.Provider)
}
type AWSMetricsCollectionStrategy struct {
// to be used as https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-metricstream.html#cfn-cloudwatch-metricstream-includefilters
CloudwatchMetricsStreamFilters []CloudwatchMetricStreamFilter `json:"cloudwatch_metric_stream_filters"`
}
type CloudwatchMetricStreamFilter struct {
// json tags here are in the shape expected by AWS API as detailed at
// https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-metricstream-metricstreamfilter.html
Namespace string `json:"Namespace"`
MetricNames []string `json:"MetricNames,omitempty"`
}
func (amc *AWSMetricsCollectionStrategy) AddServiceStrategy(
svcStrategy *AWSMetricsCollectionStrategy,
) error {
if svcStrategy == nil {
return nil
}
amc.CloudwatchMetricsStreamFilters = append(
amc.CloudwatchMetricsStreamFilters,
svcStrategy.CloudwatchMetricsStreamFilters...,
)
return nil
}
type AWSLogsCollectionStrategy struct {
CloudwatchLogsSubscriptions []CloudwatchLogsSubscriptionConfig `json:"cloudwatch_logs_subscriptions"`
}
type CloudwatchLogsSubscriptionConfig struct {
// subscribe to all log groups with the specified prefix.
// eg: `/aws/rds/`
LogGroupNamePrefix string `json:"log_group_name_prefix"`
// https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html
// "" implies no filtering is required.
FilterPattern string `json:"filter_pattern"`
}
func (alc *AWSLogsCollectionStrategy) AddServiceStrategy(
svcStrategy *AWSLogsCollectionStrategy,
) error {
if svcStrategy == nil {
return nil
}
alc.CloudwatchLogsSubscriptions = append(
alc.CloudwatchLogsSubscriptions,
svcStrategy.CloudwatchLogsSubscriptions...,
)
return nil
}
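// An illustrative sketch of how the strategies above accumulate, assuming it
// lives in this package; the "AWS/RDS" namespace and "/aws/rds/" prefix are
// hypothetical service values, not taken from a real service definition:
func exampleAccumulateStrategy() (*CloudTelemetryCollectionStrategy, error) {
	agentStrategy, err := NewCloudTelemetryCollectionStrategy("aws")
	if err != nil {
		return nil, err
	}
	svcStrategy := &CloudTelemetryCollectionStrategy{
		Provider: "aws",
		AWSMetrics: &AWSMetricsCollectionStrategy{
			CloudwatchMetricsStreamFilters: []CloudwatchMetricStreamFilter{
				{Namespace: "AWS/RDS"},
			},
		},
		AWSLogs: &AWSLogsCollectionStrategy{
			CloudwatchLogsSubscriptions: []CloudwatchLogsSubscriptionConfig{
				{LogGroupNamePrefix: "/aws/rds/", FilterPattern: ""},
			},
		},
	}
	// logsEnabled / metricsEnabled would come from the service's stored config
	if err := agentStrategy.AddServiceStrategy(svcStrategy, true, true); err != nil {
		return nil, err
	}
	return agentStrategy, nil
}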

View File

@@ -0,0 +1,165 @@
package cloudintegrations
import (
"context"
"database/sql"
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
)
type serviceConfigRepository interface {
get(
ctx context.Context,
orgID string,
cloudAccountId string,
serviceType string,
) (*types.CloudServiceConfig, *model.ApiError)
upsert(
ctx context.Context,
orgID string,
cloudProvider string,
cloudAccountId string,
serviceId string,
config types.CloudServiceConfig,
) (*types.CloudServiceConfig, *model.ApiError)
getAllForAccount(
ctx context.Context,
orgID string,
cloudAccountId string,
) (
configsBySvcId map[string]*types.CloudServiceConfig,
apiErr *model.ApiError,
)
}
func newServiceConfigRepository(store sqlstore.SQLStore) (
*serviceConfigSQLRepository, error,
) {
return &serviceConfigSQLRepository{
store: store,
}, nil
}
type serviceConfigSQLRepository struct {
store sqlstore.SQLStore
}
func (r *serviceConfigSQLRepository) get(
ctx context.Context,
orgID string,
cloudAccountId string,
serviceType string,
) (*types.CloudServiceConfig, *model.ApiError) {
var result types.CloudIntegrationService
err := r.store.BunDB().NewSelect().
Model(&result).
Join("JOIN cloud_integration ci ON ci.id = cis.cloud_integration_id").
Where("ci.org_id = ?", orgID).
Where("ci.id = ?", cloudAccountId).
Where("cis.type = ?", serviceType).
Scan(ctx)
if err == sql.ErrNoRows {
return nil, model.NotFoundError(fmt.Errorf(
"couldn't find config for cloud account %s",
cloudAccountId,
))
} else if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't query cloud service config: %w", err,
))
}
return &result.Config, nil
}
func (r *serviceConfigSQLRepository) upsert(
ctx context.Context,
orgID string,
cloudProvider string,
cloudAccountId string,
serviceId string,
config types.CloudServiceConfig,
) (*types.CloudServiceConfig, *model.ApiError) {
// look up the cloud integration id for the given cloud account id;
// the config is only upserted for a connected (not removed, reporting) account
var cloudIntegrationId string
err := r.store.BunDB().NewSelect().
Model((*types.CloudIntegration)(nil)).
Column("id").
Where("provider = ?", cloudProvider).
Where("account_id = ?", cloudAccountId).
Where("org_id = ?", orgID).
Where("removed_at is NULL").
Where("last_agent_report is not NULL").
Scan(ctx, &cloudIntegrationId)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't query cloud integration id: %w", err,
))
}
serviceConfig := types.CloudIntegrationService{
Identifiable: types.Identifiable{ID: valuer.GenerateUUID()},
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
Config: config,
Type: serviceId,
CloudIntegrationID: cloudIntegrationId,
}
_, err = r.store.BunDB().NewInsert().
Model(&serviceConfig).
On("conflict(cloud_integration_id, type) do update set config=excluded.config, updated_at=excluded.updated_at").
Exec(ctx)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not upsert cloud service config: %w", err,
))
}
return &serviceConfig.Config, nil
}
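// The conflict target above assumes a unique constraint on
// (cloud_integration_id, type) for the service config table, so repeat
// configuration calls for the same account and service overwrite config and
// updated_at instead of inserting duplicate rows.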
func (r *serviceConfigSQLRepository) getAllForAccount(
ctx context.Context,
orgID string,
cloudAccountId string,
) (map[string]*types.CloudServiceConfig, *model.ApiError) {
serviceConfigs := []types.CloudIntegrationService{}
err := r.store.BunDB().NewSelect().
Model(&serviceConfigs).
Join("JOIN cloud_integration ci ON ci.id = cis.cloud_integration_id").
Where("ci.id = ?", cloudAccountId).
Where("ci.org_id = ?", orgID).
Scan(ctx)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not query service configs from db: %w", err,
))
}
result := map[string]*types.CloudServiceConfig{}
for _, r := range serviceConfigs {
result[r.Type] = &r.Config
}
return result, nil
}

View File

@@ -1,161 +0,0 @@
package cloudintegrations
import (
"context"
"database/sql"
"fmt"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/jmoiron/sqlx"
)
type ServiceConfigDatabase interface {
get(
ctx context.Context,
cloudProvider string,
cloudAccountId string,
serviceId string,
) (*ServiceConfig, *model.ApiError)
upsert(
ctx context.Context,
cloudProvider string,
cloudAccountId string,
serviceId string,
config ServiceConfig,
) (*ServiceConfig, *model.ApiError)
getAllForAccount(
ctx context.Context,
cloudProvider string,
cloudAccountId string,
) (
configsBySvcId map[string]*ServiceConfig,
apiErr *model.ApiError,
)
}
func newServiceConfigRepository(db *sqlx.DB) (
*serviceConfigSQLRepository, error,
) {
return &serviceConfigSQLRepository{
db: db,
}, nil
}
type serviceConfigSQLRepository struct {
db *sqlx.DB
}
func (r *serviceConfigSQLRepository) get(
ctx context.Context,
cloudProvider string,
cloudAccountId string,
serviceId string,
) (*ServiceConfig, *model.ApiError) {
var result ServiceConfig
err := r.db.GetContext(
ctx, &result, `
select
config_json
from cloud_integrations_service_configs
where
cloud_provider=$1
and cloud_account_id=$2
and service_id=$3
`,
cloudProvider, cloudAccountId, serviceId,
)
if err != nil {
if err == sql.ErrNoRows {
return nil, model.NotFoundError(
fmt.Errorf("couldn't find %s %s config for %s", cloudProvider, serviceId, cloudAccountId))
}
return nil, model.InternalError(fmt.Errorf("couldn't query cloud service config: %w", err))
}
return &result, nil
}
func (r *serviceConfigSQLRepository) upsert(
ctx context.Context,
cloudProvider string,
cloudAccountId string,
serviceId string,
config ServiceConfig,
) (*ServiceConfig, *model.ApiError) {
query := `
INSERT INTO cloud_integrations_service_configs (
cloud_provider,
cloud_account_id,
service_id,
config_json
) values ($1, $2, $3, $4)
on conflict(cloud_provider, cloud_account_id, service_id)
do update set config_json=excluded.config_json
`
_, dbErr := r.db.ExecContext(
ctx, query,
cloudProvider, cloudAccountId, serviceId, &config,
)
if dbErr != nil {
return nil, model.InternalError(fmt.Errorf(
"could not upsert cloud service config: %w", dbErr,
))
}
upsertedConfig, apiErr := r.get(ctx, cloudProvider, cloudAccountId, serviceId)
if apiErr != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't fetch upserted service config: %w", apiErr.ToError(),
))
}
return upsertedConfig, nil
}
func (r *serviceConfigSQLRepository) getAllForAccount(
ctx context.Context,
cloudProvider string,
cloudAccountId string,
) (map[string]*ServiceConfig, *model.ApiError) {
type ScannedServiceConfigRecord struct {
ServiceId string `db:"service_id"`
Config ServiceConfig `db:"config_json"`
}
records := []ScannedServiceConfigRecord{}
err := r.db.SelectContext(
ctx, &records, `
select
service_id,
config_json
from cloud_integrations_service_configs
where
cloud_provider=$1
and cloud_account_id=$2
`,
cloudProvider, cloudAccountId,
)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not query service configs from db: %w", err,
))
}
result := map[string]*ServiceConfig{}
for _, r := range records {
result[r.ServiceId] = &r.Config
}
return result, nil
}

View File

@@ -1 +0,0 @@
<svg width="256" height="256" viewBox="0 0 256 256" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><title>AWS Simple Storage Service (S3)</title><defs><linearGradient x1="0%" y1="100%" x2="100%" y2="0%" id="a"><stop stop-color="#1B660F" offset="0%"/><stop stop-color="#6CAE3E" offset="100%"/></linearGradient></defs><g><rect fill="url(#a)" width="256" height="256"/><path d="M194.67488,137.25632 L195.90368,128.60352 C207.23488,135.39072 207.38208,138.19392 207.378964,138.27072 C207.35968,138.28672 205.42688,139.89952 194.67488,137.25632 L194.67488,137.25632 Z M188.45728,135.52832 C168.87328,129.60192 141.59968,117.08992 130.56288,111.87392 C130.56288,111.82912 130.57568,111.78752 130.57568,111.74272 C130.57568,107.50272 127.12608,104.05312 122.88288,104.05312 C118.64608,104.05312 115.19648,107.50272 115.19648,111.74272 C115.19648,115.98272 118.64608,119.43232 122.88288,119.43232 C124.74528,119.43232 126.43488,118.73792 127.76928,117.63392 C140.75488,123.78112 167.81728,136.11072 187.54528,141.93472 L179.74368,196.99392 C179.72128,197.14432 179.71168,197.29472 179.71168,197.44512 C179.71168,202.29312 158.24928,211.19872 123.18048,211.19872 C87.74048,211.19872 66.05088,202.29312 66.05088,197.44512 C66.05088,197.29792 66.04128,197.15392 66.02208,197.00992 L49.72128,77.94752 C63.83008,87.65952 94.17568,92.79872 123.19968,92.79872 C152.17888,92.79872 182.47328,87.67872 196.61088,77.99552 L188.45728,135.52832 Z M47.99968,65.52832 C48.23008,61.31712 72.42848,44.79872 123.19968,44.79872 C173.96448,44.79872 198.16608,61.31392 198.39968,65.52832 L198.39968,66.96512 C195.61568,76.40832 164.25568,86.39872 123.19968,86.39872 C82.07328,86.39872 50.69728,76.37632 47.99968,66.92032 L47.99968,65.52832 Z M204.79968,65.59872 C204.79968,54.51072 173.01088,38.39872 123.19968,38.39872 C73.38848,38.39872 41.59968,54.51072 41.59968,65.59872 L41.90048,68.01152 L59.65408,197.68832 C60.07968,212.19072 98.75488,217.59872 123.18048,217.59872 C153.49088,217.59872 185.69248,210.62912 186.10848,197.69792 L193.77568,143.62752 C198.04128,144.64832 201.55168,145.16992 204.37088,145.16992 C208.15648,145.16992 210.71648,144.24512 212.26848,142.39552 C213.54208,140.87872 214.02848,139.04192 213.66368,137.08672 C212.83488,132.65792 207.57728,127.88352 196.87008,121.77472 L204.47328,68.13632 L204.79968,65.59872 Z" fill="#FFFFFF"/></g></svg>


View File

@@ -1,49 +0,0 @@
{
"id": "s3sync",
"title": "S3 Sync",
"icon": "file://icon.svg",
"overview": "file://overview.md",
"supported_signals": {
"metrics": false,
"logs": true
},
"data_collected": {
"logs": [
{
"name": "Account ID",
"path": "resources.aws.account.id",
"type": "string"
},
{
"name": "Account Region",
"path": "resources.aws.account.region",
"type": "string"
},
{
"name": "Bucket Name",
"path": "resources.aws.bucket.name",
"type": "string"
},
{
"name": "Object Key",
"path": "attributes.aws.object.key",
"type": "string"
},
{
"name": "Object S3 Event Time",
"path": "attributes.aws.object.event_time",
"type": "string"
},
{
"name": "Log line number in source file",
"path": "attributes.log.source.line",
"type": "number"
}
]
},
"telemetry_collection_strategy": {
},
"assets": {
"dashboards": []
}
}

View File

@@ -1,3 +0,0 @@
### Sync logs stored in AWS S3 with SigNoz
Collect logs stored by AWS services in S3 and explore them in SigNoz.

View File

@@ -1,94 +0,0 @@
package services
import (
"github.com/SigNoz/signoz/pkg/types"
)
type Metadata struct {
Id string `json:"id"`
Title string `json:"title"`
Icon string `json:"icon"`
// Present only if the service has been configured in the
// context of a cloud provider account.
// Config *CloudServiceConfig `json:"config,omitempty"`
}
type Definition struct {
Metadata
Overview string `json:"overview"` // markdown
Assets Assets `json:"assets"`
SupportedSignals SupportedSignals `json:"supported_signals"`
DataCollected DataCollected `json:"data_collected"`
Strategy *CollectionStrategy `json:"telemetry_collection_strategy"`
}
type Assets struct {
Dashboards []Dashboard `json:"dashboards"`
}
type SupportedSignals struct {
Logs bool `json:"logs"`
Metrics bool `json:"metrics"`
}
type DataCollected struct {
Logs []CollectedLogAttribute `json:"logs"`
Metrics []CollectedMetric `json:"metrics"`
}
type CollectedLogAttribute struct {
Name string `json:"name"`
Path string `json:"path"`
Type string `json:"type"`
}
type CollectedMetric struct {
Name string `json:"name"`
Type string `json:"type"`
Unit string `json:"unit"`
Description string `json:"description"`
}
type CollectionStrategy struct {
Provider string `json:"provider"`
AWSMetrics *AWSMetricsStrategy `json:"aws_metrics,omitempty"`
AWSLogs *AWSLogsStrategy `json:"aws_logs,omitempty"`
}
type AWSMetricsStrategy struct {
// to be used as https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-metricstream.html#cfn-cloudwatch-metricstream-includefilters
StreamFilters []struct {
// json tags here are in the shape expected by AWS API as detailed at
// https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-metricstream-metricstreamfilter.html
Namespace string `json:"Namespace"`
MetricNames []string `json:"MetricNames,omitempty"`
} `json:"cloudwatch_metric_stream_filters"`
}
type AWSLogsStrategy struct {
Subscriptions []struct {
// subscribe to all logs groups with specified prefix.
// eg: `/aws/rds/`
LogGroupNamePrefix string `json:"log_group_name_prefix"`
// https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html
// "" implies no filtering is required.
FilterPattern string `json:"filter_pattern"`
} `json:"cloudwatch_logs_subscriptions"`
}
type Dashboard struct {
Id string `json:"id"`
Url string `json:"url"`
Title string `json:"title"`
Description string `json:"description"`
Image string `json:"image"`
Definition *types.DashboardData `json:"definition,omitempty"`
}
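To make the JSON mapping of the structs being removed here concrete, a small decoding sketch; the values are illustrative (the /aws/rds/ prefix is taken from the comment in AWSLogsStrategy above), and encoding/json plus fmt imports are assumed.

// illustrative only: a telemetry_collection_strategy block decodes into CollectionStrategy
// via the json tags declared above
var strategy CollectionStrategy
err := json.Unmarshal([]byte(`{
	"provider": "aws",
	"aws_logs": {
		"cloudwatch_logs_subscriptions": [
			{"log_group_name_prefix": "/aws/rds/", "filter_pattern": ""}
		]
	}
}`), &strategy)
if err != nil {
	panic(err)
}
fmt.Println(strategy.AWSLogs.Subscriptions[0].LogGroupNamePrefix) // "/aws/rds/"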

View File

@@ -22,7 +22,7 @@ import (
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/metricsexplorer"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -38,7 +38,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/app/dashboards"
"github.com/SigNoz/signoz/pkg/query-service/app/explorer"
"github.com/SigNoz/signoz/pkg/query-service/app/inframetrics"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
queues2 "github.com/SigNoz/signoz/pkg/query-service/app/integrations/messagingQueues/queues"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations/thirdPartyApi"
"github.com/SigNoz/signoz/pkg/query-service/app/logs"
@@ -1083,14 +1082,14 @@ func (aH *APIHandler) getDashboards(w http.ResponseWriter, r *http.Request) {
}
ic := aH.IntegrationsController
installedIntegrationDashboards, err := ic.GetDashboardsForInstalledIntegrations(r.Context())
installedIntegrationDashboards, err := ic.GetDashboardsForInstalledIntegrations(r.Context(), claims.OrgID)
if err != nil {
zap.L().Error("failed to get dashboards for installed integrations", zap.Error(err))
} else {
allDashboards = append(allDashboards, installedIntegrationDashboards...)
}
cloudIntegrationDashboards, err := aH.CloudIntegrationsController.AvailableDashboards(r.Context())
cloudIntegrationDashboards, err := aH.CloudIntegrationsController.AvailableDashboards(r.Context(), claims.OrgID)
if err != nil {
zap.L().Error("failed to get cloud dashboards", zap.Error(err))
} else {
@@ -1268,7 +1267,7 @@ func (aH *APIHandler) getDashboard(w http.ResponseWriter, r *http.Request) {
if aH.CloudIntegrationsController.IsCloudIntegrationDashboardUuid(uuid) {
dashboard, apiError = aH.CloudIntegrationsController.GetDashboardById(
r.Context(), uuid,
r.Context(), claims.OrgID, uuid,
)
if apiError != nil {
RespondError(w, apiError, nil)
@@ -1277,7 +1276,7 @@ func (aH *APIHandler) getDashboard(w http.ResponseWriter, r *http.Request) {
} else {
dashboard, apiError = aH.IntegrationsController.GetInstalledIntegrationDashboardById(
r.Context(), uuid,
r.Context(), claims.OrgID, uuid,
)
if apiError != nil {
RespondError(w, apiError, nil)
@@ -2208,6 +2207,11 @@ func (aH *APIHandler) editUser(w http.ResponseWriter, r *http.Request) {
old.ProfilePictureURL = update.ProfilePictureURL
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(old.Email)) {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user cannot be updated"))
return
}
_, apiErr = dao.DB().EditUser(ctx, &types.User{
ID: old.ID,
Name: old.Name,
@@ -2239,6 +2243,11 @@ func (aH *APIHandler) deleteUser(w http.ResponseWriter, r *http.Request) {
return
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(user.Email)) {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user cannot be deleted"))
return
}
if user == nil {
RespondError(w, &model.ApiError{
Typ: model.ErrorNotFound,
@@ -3498,9 +3507,14 @@ func (aH *APIHandler) ListIntegrations(
for k, values := range r.URL.Query() {
params[k] = values[0]
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
resp, apiErr := aH.IntegrationsController.ListIntegrations(
r.Context(), params,
r.Context(), claims.OrgID, params,
)
if apiErr != nil {
RespondError(w, apiErr, "Failed to fetch integrations")
@@ -3513,8 +3527,13 @@ func (aH *APIHandler) GetIntegration(
w http.ResponseWriter, r *http.Request,
) {
integrationId := mux.Vars(r)["integrationId"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
integration, apiErr := aH.IntegrationsController.GetIntegration(
r.Context(), integrationId,
r.Context(), claims.OrgID, integrationId,
)
if apiErr != nil {
RespondError(w, apiErr, "Failed to fetch integration details")
@@ -3528,8 +3547,13 @@ func (aH *APIHandler) GetIntegrationConnectionStatus(
w http.ResponseWriter, r *http.Request,
) {
integrationId := mux.Vars(r)["integrationId"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
isInstalled, apiErr := aH.IntegrationsController.IsIntegrationInstalled(
r.Context(), integrationId,
r.Context(), claims.OrgID, integrationId,
)
if apiErr != nil {
RespondError(w, apiErr, "failed to check if integration is installed")
@@ -3543,7 +3567,7 @@ func (aH *APIHandler) GetIntegrationConnectionStatus(
}
connectionTests, apiErr := aH.IntegrationsController.GetIntegrationConnectionTests(
r.Context(), integrationId,
r.Context(), claims.OrgID, integrationId,
)
if apiErr != nil {
RespondError(w, apiErr, "failed to fetch integration connection tests")
@@ -3742,8 +3766,14 @@ func (aH *APIHandler) InstallIntegration(
return
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
integration, apiErr := aH.IntegrationsController.Install(
r.Context(), &req,
r.Context(), claims.OrgID, &req,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
@@ -3764,7 +3794,13 @@ func (aH *APIHandler) UninstallIntegration(
return
}
apiErr := aH.IntegrationsController.Uninstall(r.Context(), &req)
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
apiErr := aH.IntegrationsController.Uninstall(r.Context(), claims.OrgID, &req)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
@@ -3820,8 +3856,14 @@ func (aH *APIHandler) CloudIntegrationsListConnectedAccounts(
) {
cloudProvider := mux.Vars(r)["cloudProvider"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
resp, apiErr := aH.CloudIntegrationsController.ListConnectedAccounts(
r.Context(), cloudProvider,
r.Context(), claims.OrgID, cloudProvider,
)
if apiErr != nil {
@@ -3842,8 +3884,14 @@ func (aH *APIHandler) CloudIntegrationsGenerateConnectionUrl(
return
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
result, apiErr := aH.CloudIntegrationsController.GenerateConnectionUrl(
r.Context(), cloudProvider, req,
r.Context(), claims.OrgID, cloudProvider, req,
)
if apiErr != nil {
@@ -3860,8 +3908,14 @@ func (aH *APIHandler) CloudIntegrationsGetAccountStatus(
cloudProvider := mux.Vars(r)["cloudProvider"]
accountId := mux.Vars(r)["accountId"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
resp, apiErr := aH.CloudIntegrationsController.GetAccountStatus(
r.Context(), cloudProvider, accountId,
r.Context(), claims.OrgID, cloudProvider, accountId,
)
if apiErr != nil {
@@ -3882,8 +3936,14 @@ func (aH *APIHandler) CloudIntegrationsAgentCheckIn(
return
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
result, apiErr := aH.CloudIntegrationsController.CheckInAsAgent(
r.Context(), cloudProvider, req,
r.Context(), claims.OrgID, cloudProvider, req,
)
if apiErr != nil {
@@ -3906,8 +3966,14 @@ func (aH *APIHandler) CloudIntegrationsUpdateAccountConfig(
return
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
result, apiErr := aH.CloudIntegrationsController.UpdateAccountConfig(
r.Context(), cloudProvider, accountId, req,
r.Context(), claims.OrgID, cloudProvider, accountId, req,
)
if apiErr != nil {
@@ -3924,8 +3990,14 @@ func (aH *APIHandler) CloudIntegrationsDisconnectAccount(
cloudProvider := mux.Vars(r)["cloudProvider"]
accountId := mux.Vars(r)["accountId"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
result, apiErr := aH.CloudIntegrationsController.DisconnectAccount(
r.Context(), cloudProvider, accountId,
r.Context(), claims.OrgID, cloudProvider, accountId,
)
if apiErr != nil {
@@ -3948,8 +4020,14 @@ func (aH *APIHandler) CloudIntegrationsListServices(
cloudAccountId = &cloudAccountIdQP
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
resp, apiErr := aH.CloudIntegrationsController.ListServices(
r.Context(), cloudProvider, cloudAccountId,
r.Context(), claims.OrgID, cloudProvider, cloudAccountId,
)
if apiErr != nil {
@@ -3972,8 +4050,14 @@ func (aH *APIHandler) CloudIntegrationsGetServiceDetails(
cloudAccountId = &cloudAccountIdQP
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
resp, apiErr := aH.CloudIntegrationsController.GetServiceDetails(
r.Context(), cloudProvider, serviceId, cloudAccountId,
r.Context(), claims.OrgID, cloudProvider, serviceId, cloudAccountId,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
@@ -3999,8 +4083,8 @@ func (aH *APIHandler) calculateCloudIntegrationServiceConnectionStatus(
ctx context.Context,
cloudProvider string,
cloudAccountId string,
svcDetails *cloudintegrations.ServiceDetails,
) (*cloudintegrations.ServiceConnectionStatus, *model.ApiError) {
svcDetails *cloudintegrations.CloudServiceDetails,
) (*cloudintegrations.CloudServiceConnectionStatus, *model.ApiError) {
if cloudProvider != "aws" {
// TODO(Raj): Make connection check generic for all providers in a follow up change
return nil, model.BadRequest(
@@ -4008,14 +4092,14 @@ func (aH *APIHandler) calculateCloudIntegrationServiceConnectionStatus(
)
}
telemetryCollectionStrategy := svcDetails.Strategy
telemetryCollectionStrategy := svcDetails.TelemetryCollectionStrategy
if telemetryCollectionStrategy == nil {
return nil, model.InternalError(fmt.Errorf(
"service doesn't have telemetry collection strategy: %s", svcDetails.Id,
))
}
result := &cloudintegrations.ServiceConnectionStatus{}
result := &cloudintegrations.CloudServiceConnectionStatus{}
errors := []*model.ApiError{}
var resultLock sync.Mutex
@@ -4075,10 +4159,10 @@ func (aH *APIHandler) calculateCloudIntegrationServiceConnectionStatus(
func (aH *APIHandler) calculateAWSIntegrationSvcMetricsConnectionStatus(
ctx context.Context,
cloudAccountId string,
strategy *services.AWSMetricsStrategy,
metricsCollectedBySvc []services.CollectedMetric,
strategy *cloudintegrations.AWSMetricsCollectionStrategy,
metricsCollectedBySvc []cloudintegrations.CollectedMetric,
) (*cloudintegrations.SignalConnectionStatus, *model.ApiError) {
if strategy == nil || len(strategy.StreamFilters) < 1 {
if strategy == nil || len(strategy.CloudwatchMetricsStreamFilters) < 1 {
return nil, nil
}
@@ -4087,7 +4171,7 @@ func (aH *APIHandler) calculateAWSIntegrationSvcMetricsConnectionStatus(
"cloud_account_id": cloudAccountId,
}
metricsNamespace := strategy.StreamFilters[0].Namespace
metricsNamespace := strategy.CloudwatchMetricsStreamFilters[0].Namespace
metricsNamespaceParts := strings.Split(metricsNamespace, "/")
if len(metricsNamespaceParts) >= 2 {
@@ -4124,13 +4208,13 @@ func (aH *APIHandler) calculateAWSIntegrationSvcMetricsConnectionStatus(
func (aH *APIHandler) calculateAWSIntegrationSvcLogsConnectionStatus(
ctx context.Context,
cloudAccountId string,
strategy *services.AWSLogsStrategy,
strategy *cloudintegrations.AWSLogsCollectionStrategy,
) (*cloudintegrations.SignalConnectionStatus, *model.ApiError) {
if strategy == nil || len(strategy.Subscriptions) < 1 {
if strategy == nil || len(strategy.CloudwatchLogsSubscriptions) < 1 {
return nil, nil
}
logGroupNamePrefix := strategy.Subscriptions[0].LogGroupNamePrefix
logGroupNamePrefix := strategy.CloudwatchLogsSubscriptions[0].LogGroupNamePrefix
if len(logGroupNamePrefix) < 1 {
return nil, nil
}
@@ -4212,8 +4296,14 @@ func (aH *APIHandler) CloudIntegrationsUpdateServiceConfig(
return
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
result, apiErr := aH.CloudIntegrationsController.UpdateServiceConfig(
r.Context(), cloudProvider, serviceId, req,
r.Context(), claims.OrgID, cloudProvider, serviceId, req,
)
if apiErr != nil {
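Each handler above now repeats the same guard: read the claims from the request context, return an unauthenticated error if they are missing, and pass claims.OrgID into the controller call. Purely as a readability sketch, not part of this diff, the repetition could be collapsed into a helper like the hypothetical orgIDFromRequest below, built from the same calls the handlers already make.

// hypothetical helper, not in the PR: centralizes the claims lookup used by the handlers above
func orgIDFromRequest(w http.ResponseWriter, r *http.Request) (string, bool) {
	claims, ok := authtypes.ClaimsFromContext(r.Context())
	if !ok {
		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
		return "", false
	}
	return claims.OrgID, true
}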

View File

@@ -18,7 +18,7 @@ type Controller struct {
func NewController(sqlStore sqlstore.SQLStore) (
*Controller, error,
) {
mgr, err := NewManager(sqlStore.SQLxDB())
mgr, err := NewManager(sqlStore)
if err != nil {
return nil, fmt.Errorf("couldn't create integrations manager: %w", err)
}
@@ -35,7 +35,7 @@ type IntegrationsListResponse struct {
}
func (c *Controller) ListIntegrations(
ctx context.Context, params map[string]string,
ctx context.Context, orgId string, params map[string]string,
) (
*IntegrationsListResponse, *model.ApiError,
) {
@@ -47,7 +47,7 @@ func (c *Controller) ListIntegrations(
}
}
integrations, apiErr := c.mgr.ListIntegrations(ctx, filters)
integrations, apiErr := c.mgr.ListIntegrations(ctx, orgId, filters)
if apiErr != nil {
return nil, apiErr
}
@@ -58,16 +58,15 @@ func (c *Controller) ListIntegrations(
}
func (c *Controller) GetIntegration(
ctx context.Context, integrationId string,
ctx context.Context, orgId string, integrationId string,
) (*Integration, *model.ApiError) {
return c.mgr.GetIntegration(ctx, integrationId)
return c.mgr.GetIntegration(ctx, orgId, integrationId)
}
func (c *Controller) IsIntegrationInstalled(
ctx context.Context,
integrationId string,
ctx context.Context, orgId string, integrationId string,
) (bool, *model.ApiError) {
installation, apiErr := c.mgr.getInstalledIntegration(ctx, integrationId)
installation, apiErr := c.mgr.getInstalledIntegration(ctx, orgId, integrationId)
if apiErr != nil {
return false, apiErr
}
@@ -76,9 +75,9 @@ func (c *Controller) IsIntegrationInstalled(
}
func (c *Controller) GetIntegrationConnectionTests(
ctx context.Context, integrationId string,
ctx context.Context, orgId string, integrationId string,
) (*IntegrationConnectionTests, *model.ApiError) {
return c.mgr.GetIntegrationConnectionTests(ctx, integrationId)
return c.mgr.GetIntegrationConnectionTests(ctx, orgId, integrationId)
}
type InstallIntegrationRequest struct {
@@ -87,10 +86,10 @@ type InstallIntegrationRequest struct {
}
func (c *Controller) Install(
ctx context.Context, req *InstallIntegrationRequest,
ctx context.Context, orgId string, req *InstallIntegrationRequest,
) (*IntegrationsListItem, *model.ApiError) {
res, apiErr := c.mgr.InstallIntegration(
ctx, req.IntegrationId, req.Config,
ctx, orgId, req.IntegrationId, req.Config,
)
if apiErr != nil {
return nil, apiErr
@@ -104,7 +103,7 @@ type UninstallIntegrationRequest struct {
}
func (c *Controller) Uninstall(
ctx context.Context, req *UninstallIntegrationRequest,
ctx context.Context, orgId string, req *UninstallIntegrationRequest,
) *model.ApiError {
if len(req.IntegrationId) < 1 {
return model.BadRequest(fmt.Errorf(
@@ -113,7 +112,7 @@ func (c *Controller) Uninstall(
}
apiErr := c.mgr.UninstallIntegration(
ctx, req.IntegrationId,
ctx, orgId, req.IntegrationId,
)
if apiErr != nil {
return apiErr
@@ -123,19 +122,19 @@ func (c *Controller) Uninstall(
}
func (c *Controller) GetPipelinesForInstalledIntegrations(
ctx context.Context,
ctx context.Context, orgId string,
) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
return c.mgr.GetPipelinesForInstalledIntegrations(ctx)
return c.mgr.GetPipelinesForInstalledIntegrations(ctx, orgId)
}
func (c *Controller) GetDashboardsForInstalledIntegrations(
ctx context.Context,
ctx context.Context, orgId string,
) ([]types.Dashboard, *model.ApiError) {
return c.mgr.GetDashboardsForInstalledIntegrations(ctx)
return c.mgr.GetDashboardsForInstalledIntegrations(ctx, orgId)
}
func (c *Controller) GetInstalledIntegrationDashboardById(
ctx context.Context, dashboardUuid string,
ctx context.Context, orgId string, dashboardUuid string,
) (*types.Dashboard, *model.ApiError) {
return c.mgr.GetInstalledIntegrationDashboardById(ctx, dashboardUuid)
return c.mgr.GetInstalledIntegrationDashboardById(ctx, orgId, dashboardUuid)
}
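A minimal usage sketch of the updated controller signatures, with orgId now threaded through every call; orgID and the "aws-integration" id below are placeholders, not values from this change.

// illustrative only: org-scoped controller calls after this change
integrations, apiErr := c.ListIntegrations(ctx, orgID, map[string]string{})
if apiErr != nil {
	return apiErr
}
_ = integrations
apiErr = c.Uninstall(ctx, orgID, &UninstallIntegrationRequest{
	IntegrationId: "aws-integration", // hypothetical integration id
})
if apiErr != nil {
	return apiErr
}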

View File

@@ -5,15 +5,14 @@ import (
"fmt"
"slices"
"strings"
"time"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/jmoiron/sqlx"
)
type IntegrationAuthor struct {
@@ -105,16 +104,9 @@ type IntegrationsListItem struct {
IsInstalled bool `json:"is_installed"`
}
type InstalledIntegration struct {
IntegrationId string `json:"integration_id" db:"integration_id"`
Config InstalledIntegrationConfig `json:"config_json" db:"config_json"`
InstalledAt time.Time `json:"installed_at" db:"installed_at"`
}
type InstalledIntegrationConfig map[string]interface{}
type Integration struct {
IntegrationDetails
Installation *InstalledIntegration `json:"installation"`
Installation *types.InstalledIntegration `json:"installation"`
}
type Manager struct {
@@ -122,8 +114,8 @@ type Manager struct {
installedIntegrationsRepo InstalledIntegrationsRepo
}
func NewManager(db *sqlx.DB) (*Manager, error) {
iiRepo, err := NewInstalledIntegrationsSqliteRepo(db)
func NewManager(store sqlstore.SQLStore) (*Manager, error) {
iiRepo, err := NewInstalledIntegrationsSqliteRepo(store)
if err != nil {
return nil, fmt.Errorf(
"could not init sqlite DB for installed integrations: %w", err,
@@ -142,6 +134,7 @@ type IntegrationsFilter struct {
func (m *Manager) ListIntegrations(
ctx context.Context,
orgId string,
filter *IntegrationsFilter,
// Expected to have pagination over time.
) ([]IntegrationsListItem, *model.ApiError) {
@@ -152,22 +145,22 @@ func (m *Manager) ListIntegrations(
)
}
installed, apiErr := m.installedIntegrationsRepo.list(ctx)
installed, apiErr := m.installedIntegrationsRepo.list(ctx, orgId)
if apiErr != nil {
return nil, model.WrapApiError(
apiErr, "could not fetch installed integrations",
)
}
installedIds := []string{}
installedTypes := []string{}
for _, ii := range installed {
installedIds = append(installedIds, ii.IntegrationId)
installedTypes = append(installedTypes, ii.Type)
}
result := []IntegrationsListItem{}
for _, ai := range available {
result = append(result, IntegrationsListItem{
IntegrationSummary: ai.IntegrationSummary,
IsInstalled: slices.Contains(installedIds, ai.Id),
IsInstalled: slices.Contains(installedTypes, ai.Id),
})
}
@@ -188,6 +181,7 @@ func (m *Manager) ListIntegrations(
func (m *Manager) GetIntegration(
ctx context.Context,
orgId string,
integrationId string,
) (*Integration, *model.ApiError) {
integrationDetails, apiErr := m.getIntegrationDetails(
@@ -198,7 +192,7 @@ func (m *Manager) GetIntegration(
}
installation, apiErr := m.getInstalledIntegration(
ctx, integrationId,
ctx, orgId, integrationId,
)
if apiErr != nil {
return nil, apiErr
@@ -212,6 +206,7 @@ func (m *Manager) GetIntegration(
func (m *Manager) GetIntegrationConnectionTests(
ctx context.Context,
orgId string,
integrationId string,
) (*IntegrationConnectionTests, *model.ApiError) {
integrationDetails, apiErr := m.getIntegrationDetails(
@@ -225,8 +220,9 @@ func (m *Manager) GetIntegrationConnectionTests(
func (m *Manager) InstallIntegration(
ctx context.Context,
orgId string,
integrationId string,
config InstalledIntegrationConfig,
config types.InstalledIntegrationConfig,
) (*IntegrationsListItem, *model.ApiError) {
integrationDetails, apiErr := m.getIntegrationDetails(ctx, integrationId)
if apiErr != nil {
@@ -234,7 +230,7 @@ func (m *Manager) InstallIntegration(
}
_, apiErr = m.installedIntegrationsRepo.upsert(
ctx, integrationId, config,
ctx, orgId, integrationId, config,
)
if apiErr != nil {
return nil, model.WrapApiError(
@@ -250,15 +246,17 @@ func (m *Manager) InstallIntegration(
func (m *Manager) UninstallIntegration(
ctx context.Context,
orgId string,
integrationId string,
) *model.ApiError {
return m.installedIntegrationsRepo.delete(ctx, integrationId)
return m.installedIntegrationsRepo.delete(ctx, orgId, integrationId)
}
func (m *Manager) GetPipelinesForInstalledIntegrations(
ctx context.Context,
orgId string,
) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
installedIntegrations, apiErr := m.getInstalledIntegrations(ctx)
installedIntegrations, apiErr := m.getInstalledIntegrations(ctx, orgId)
if apiErr != nil {
return nil, apiErr
}
@@ -308,6 +306,7 @@ func (m *Manager) parseDashboardUuid(dashboardUuid string) (
func (m *Manager) GetInstalledIntegrationDashboardById(
ctx context.Context,
orgId string,
dashboardUuid string,
) (*types.Dashboard, *model.ApiError) {
integrationId, dashboardId, apiErr := m.parseDashboardUuid(dashboardUuid)
@@ -315,7 +314,7 @@ func (m *Manager) GetInstalledIntegrationDashboardById(
return nil, apiErr
}
integration, apiErr := m.GetIntegration(ctx, integrationId)
integration, apiErr := m.GetIntegration(ctx, orgId, integrationId)
if apiErr != nil {
return nil, apiErr
}
@@ -355,8 +354,9 @@ func (m *Manager) GetInstalledIntegrationDashboardById(
func (m *Manager) GetDashboardsForInstalledIntegrations(
ctx context.Context,
orgId string,
) ([]types.Dashboard, *model.ApiError) {
installedIntegrations, apiErr := m.getInstalledIntegrations(ctx)
installedIntegrations, apiErr := m.getInstalledIntegrations(ctx, orgId)
if apiErr != nil {
return nil, apiErr
}
@@ -421,10 +421,11 @@ func (m *Manager) getIntegrationDetails(
func (m *Manager) getInstalledIntegration(
ctx context.Context,
orgId string,
integrationId string,
) (*InstalledIntegration, *model.ApiError) {
) (*types.InstalledIntegration, *model.ApiError) {
iis, apiErr := m.installedIntegrationsRepo.get(
ctx, []string{integrationId},
ctx, orgId, []string{integrationId},
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, fmt.Sprintf(
@@ -441,32 +442,33 @@ func (m *Manager) getInstalledIntegration(
func (m *Manager) getInstalledIntegrations(
ctx context.Context,
orgId string,
) (
map[string]Integration, *model.ApiError,
) {
installations, apiErr := m.installedIntegrationsRepo.list(ctx)
installations, apiErr := m.installedIntegrationsRepo.list(ctx, orgId)
if apiErr != nil {
return nil, apiErr
}
installedIds := utils.MapSlice(installations, func(i InstalledIntegration) string {
return i.IntegrationId
installedTypes := utils.MapSlice(installations, func(i types.InstalledIntegration) string {
return i.Type
})
integrationDetails, apiErr := m.availableIntegrationsRepo.get(ctx, installedIds)
integrationDetails, apiErr := m.availableIntegrationsRepo.get(ctx, installedTypes)
if apiErr != nil {
return nil, apiErr
}
result := map[string]Integration{}
for _, ii := range installations {
iDetails, exists := integrationDetails[ii.IntegrationId]
iDetails, exists := integrationDetails[ii.Type]
if !exists {
return nil, model.InternalError(fmt.Errorf(
"couldn't find integration details for %s", ii.IntegrationId,
"couldn't find integration details for %s", ii.Type,
))
}
result[ii.IntegrationId] = Integration{
result[ii.Type] = Integration{
Installation: &ii,
IntegrationDetails: iDetails,
}
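Note that the map built here is now keyed by integration type (previously by the integration_id column). A minimal lookup sketch, where "aws-elb" is a hypothetical type used only for illustration:

// illustrative only: lookups are by type, scoped to the org
installed, apiErr := m.getInstalledIntegrations(ctx, orgId)
if apiErr == nil {
	if ii, ok := installed["aws-elb"]; ok {
		_ = ii.Installation // the stored types.InstalledIntegration row for this org
	}
}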

View File

@@ -14,18 +14,23 @@ func TestIntegrationLifecycle(t *testing.T) {
mgr := NewTestIntegrationsManager(t)
ctx := context.Background()
user, apiErr := createTestUser()
if apiErr != nil {
t.Fatalf("could not create test user: %v", apiErr)
}
ii := true
installedIntegrationsFilter := &IntegrationsFilter{
IsInstalled: &ii,
}
installedIntegrations, apiErr := mgr.ListIntegrations(
ctx, installedIntegrationsFilter,
ctx, user.OrgID, installedIntegrationsFilter,
)
require.Nil(apiErr)
require.Equal([]IntegrationsListItem{}, installedIntegrations)
availableIntegrations, apiErr := mgr.ListIntegrations(ctx, nil)
availableIntegrations, apiErr := mgr.ListIntegrations(ctx, user.OrgID, nil)
require.Nil(apiErr)
require.Equal(2, len(availableIntegrations))
require.False(availableIntegrations[0].IsInstalled)
@@ -33,44 +38,44 @@ func TestIntegrationLifecycle(t *testing.T) {
testIntegrationConfig := map[string]interface{}{}
installed, apiErr := mgr.InstallIntegration(
ctx, availableIntegrations[1].Id, testIntegrationConfig,
ctx, user.OrgID, availableIntegrations[1].Id, testIntegrationConfig,
)
require.Nil(apiErr)
require.Equal(installed.Id, availableIntegrations[1].Id)
integration, apiErr := mgr.GetIntegration(ctx, availableIntegrations[1].Id)
integration, apiErr := mgr.GetIntegration(ctx, user.OrgID, availableIntegrations[1].Id)
require.Nil(apiErr)
require.Equal(integration.Id, availableIntegrations[1].Id)
require.NotNil(integration.Installation)
installedIntegrations, apiErr = mgr.ListIntegrations(
ctx, installedIntegrationsFilter,
ctx, user.OrgID, installedIntegrationsFilter,
)
require.Nil(apiErr)
require.Equal(1, len(installedIntegrations))
require.Equal(availableIntegrations[1].Id, installedIntegrations[0].Id)
availableIntegrations, apiErr = mgr.ListIntegrations(ctx, nil)
availableIntegrations, apiErr = mgr.ListIntegrations(ctx, user.OrgID, nil)
require.Nil(apiErr)
require.Equal(2, len(availableIntegrations))
require.False(availableIntegrations[0].IsInstalled)
require.True(availableIntegrations[1].IsInstalled)
apiErr = mgr.UninstallIntegration(ctx, installed.Id)
apiErr = mgr.UninstallIntegration(ctx, user.OrgID, installed.Id)
require.Nil(apiErr)
integration, apiErr = mgr.GetIntegration(ctx, availableIntegrations[1].Id)
integration, apiErr = mgr.GetIntegration(ctx, user.OrgID, availableIntegrations[1].Id)
require.Nil(apiErr)
require.Equal(integration.Id, availableIntegrations[1].Id)
require.Nil(integration.Installation)
installedIntegrations, apiErr = mgr.ListIntegrations(
ctx, installedIntegrationsFilter,
ctx, user.OrgID, installedIntegrationsFilter,
)
require.Nil(apiErr)
require.Equal(0, len(installedIntegrations))
availableIntegrations, apiErr = mgr.ListIntegrations(ctx, nil)
availableIntegrations, apiErr = mgr.ListIntegrations(ctx, user.OrgID, nil)
require.Nil(apiErr)
require.Equal(2, len(availableIntegrations))
require.False(availableIntegrations[0].IsInstalled)

View File

@@ -2,51 +2,33 @@ package integrations
import (
"context"
"database/sql/driver"
"encoding/json"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/pkg/errors"
"github.com/SigNoz/signoz/pkg/types"
)
// For serializing from db
func (c *InstalledIntegrationConfig) Scan(src interface{}) error {
if data, ok := src.([]byte); ok {
return json.Unmarshal(data, &c)
}
return nil
}
// For serializing to db
func (c *InstalledIntegrationConfig) Value() (driver.Value, error) {
filterSetJson, err := json.Marshal(c)
if err != nil {
return nil, errors.Wrap(err, "could not serialize integration config to JSON")
}
return filterSetJson, nil
}
type InstalledIntegrationsRepo interface {
list(context.Context) ([]InstalledIntegration, *model.ApiError)
list(ctx context.Context, orgId string) ([]types.InstalledIntegration, *model.ApiError)
get(
ctx context.Context, integrationIds []string,
) (map[string]InstalledIntegration, *model.ApiError)
ctx context.Context, orgId string, integrationTypes []string,
) (map[string]types.InstalledIntegration, *model.ApiError)
upsert(
ctx context.Context,
integrationId string,
config InstalledIntegrationConfig,
) (*InstalledIntegration, *model.ApiError)
orgId string,
integrationType string,
config types.InstalledIntegrationConfig,
) (*types.InstalledIntegration, *model.ApiError)
delete(ctx context.Context, integrationId string) *model.ApiError
delete(ctx context.Context, orgId string, integrationType string) *model.ApiError
}
type AvailableIntegrationsRepo interface {
list(context.Context) ([]IntegrationDetails, *model.ApiError)
get(
ctx context.Context, integrationIds []string,
ctx context.Context, integrationTypes []string,
) (map[string]IntegrationDetails, *model.ApiError)
// AvailableIntegrationsRepo implementations are expected to cache

View File

@@ -3,39 +3,37 @@ package integrations
import (
"context"
"fmt"
"strings"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/jmoiron/sqlx"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
)
type InstalledIntegrationsSqliteRepo struct {
db *sqlx.DB
store sqlstore.SQLStore
}
func NewInstalledIntegrationsSqliteRepo(db *sqlx.DB) (
func NewInstalledIntegrationsSqliteRepo(store sqlstore.SQLStore) (
*InstalledIntegrationsSqliteRepo, error,
) {
return &InstalledIntegrationsSqliteRepo{
db: db,
store: store,
}, nil
}
func (r *InstalledIntegrationsSqliteRepo) list(
ctx context.Context,
) ([]InstalledIntegration, *model.ApiError) {
integrations := []InstalledIntegration{}
orgId string,
) ([]types.InstalledIntegration, *model.ApiError) {
integrations := []types.InstalledIntegration{}
err := r.db.SelectContext(
ctx, &integrations, `
select
integration_id,
config_json,
installed_at
from integrations_installed
order by installed_at
`,
)
err := r.store.BunDB().NewSelect().
Model(&integrations).
Where("org_id = ?", orgId).
Order("installed_at").
Scan(ctx)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not query installed integrations: %w", err,
@@ -45,38 +43,28 @@ func (r *InstalledIntegrationsSqliteRepo) list(
}
func (r *InstalledIntegrationsSqliteRepo) get(
ctx context.Context, integrationIds []string,
) (map[string]InstalledIntegration, *model.ApiError) {
integrations := []InstalledIntegration{}
ctx context.Context, orgId string, integrationTypes []string,
) (map[string]types.InstalledIntegration, *model.ApiError) {
integrations := []types.InstalledIntegration{}
idPlaceholders := []string{}
idValues := []interface{}{}
for _, id := range integrationIds {
idPlaceholders = append(idPlaceholders, "?")
idValues = append(idValues, id)
typeValues := []interface{}{}
for _, integrationType := range integrationTypes {
typeValues = append(typeValues, integrationType)
}
err := r.db.SelectContext(
ctx, &integrations, fmt.Sprintf(`
select
integration_id,
config_json,
installed_at
from integrations_installed
where integration_id in (%s)`,
strings.Join(idPlaceholders, ", "),
),
idValues...,
)
err := r.store.BunDB().NewSelect().Model(&integrations).
Where("org_id = ?", orgId).
Where("type IN (?)", bun.In(typeValues)).
Scan(ctx)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not query installed integrations: %w", err,
))
}
result := map[string]InstalledIntegration{}
result := map[string]types.InstalledIntegration{}
for _, ii := range integrations {
result[ii.IntegrationId] = ii
result[ii.Type] = ii
}
return result, nil
@@ -84,55 +72,57 @@ func (r *InstalledIntegrationsSqliteRepo) get(
func (r *InstalledIntegrationsSqliteRepo) upsert(
ctx context.Context,
integrationId string,
config InstalledIntegrationConfig,
) (*InstalledIntegration, *model.ApiError) {
serializedConfig, err := config.Value()
if err != nil {
return nil, model.BadRequest(fmt.Errorf(
"could not serialize integration config: %w", err,
))
orgId string,
integrationType string,
config types.InstalledIntegrationConfig,
) (*types.InstalledIntegration, *model.ApiError) {
integration := types.InstalledIntegration{
Identifiable: types.Identifiable{
ID: valuer.GenerateUUID(),
},
OrgID: orgId,
Type: integrationType,
Config: config,
}
_, dbErr := r.db.ExecContext(
ctx, `
INSERT INTO integrations_installed (
integration_id,
config_json
) values ($1, $2)
on conflict(integration_id) do update
set config_json=excluded.config_json
`, integrationId, serializedConfig,
)
_, dbErr := r.store.BunDB().NewInsert().
Model(&integration).
On("conflict (type, org_id) DO UPDATE").
Set("config = EXCLUDED.config").
Exec(ctx)
if dbErr != nil {
return nil, model.InternalError(fmt.Errorf(
"could not insert record for integration installation: %w", dbErr,
))
}
res, apiErr := r.get(ctx, []string{integrationId})
res, apiErr := r.get(ctx, orgId, []string{integrationType})
if apiErr != nil || len(res) < 1 {
return nil, model.WrapApiError(
apiErr, "could not fetch installed integration",
)
}
installed := res[integrationId]
installed := res[integrationType]
return &installed, nil
}
func (r *InstalledIntegrationsSqliteRepo) delete(
ctx context.Context, integrationId string,
ctx context.Context, orgId string, integrationType string,
) *model.ApiError {
_, dbErr := r.db.ExecContext(ctx, `
DELETE FROM integrations_installed where integration_id = ?
`, integrationId)
_, dbErr := r.store.BunDB().NewDelete().
Model(&types.InstalledIntegration{}).
Where("type = ?", integrationType).
Where("org_id = ?", orgId).
Exec(ctx)
if dbErr != nil {
return model.InternalError(fmt.Errorf(
"could not delete installed integration record for %s: %w",
integrationId, dbErr,
integrationType, dbErr,
))
}
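For reference, a sketch of the statement the bun upsert above roughly emits (column list abbreviated; the exact SQL depends on the InstalledIntegration model), followed by the new call shape; "aws-elb" is a hypothetical type.

// illustrative only; the upsert roughly corresponds to:
//   INSERT INTO installed_integration (id, type, config, org_id, ...)
//   VALUES (...)
//   ON CONFLICT (type, org_id) DO UPDATE SET config = EXCLUDED.config
installed, apiErr := repo.upsert(ctx, orgId, "aws-elb", types.InstalledIntegrationConfig{})
if apiErr != nil {
	return apiErr
}
_ = installed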

View File

@@ -5,18 +5,22 @@ import (
"slices"
"testing"
"github.com/SigNoz/signoz/pkg/query-service/auth"
"github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/dao"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/google/uuid"
)
func NewTestIntegrationsManager(t *testing.T) *Manager {
testDB := utils.NewQueryServiceDBForTests(t)
installedIntegrationsRepo, err := NewInstalledIntegrationsSqliteRepo(testDB.SQLxDB())
installedIntegrationsRepo, err := NewInstalledIntegrationsSqliteRepo(testDB)
if err != nil {
t.Fatalf("could not init sqlite DB for installed integrations: %v", err)
}
@@ -27,6 +31,38 @@ func NewTestIntegrationsManager(t *testing.T) *Manager {
}
}
func createTestUser() (*types.User, *model.ApiError) {
// Create a test user for auth
ctx := context.Background()
org, apiErr := dao.DB().CreateOrg(ctx, &types.Organization{
Name: "test",
})
if apiErr != nil {
return nil, apiErr
}
group, apiErr := dao.DB().GetGroupByName(ctx, constants.AdminGroup)
if apiErr != nil {
return nil, apiErr
}
auth.InitAuthCache(ctx)
userId := uuid.NewString()
return dao.DB().CreateUser(
ctx,
&types.User{
ID: userId,
Name: "test",
Email: userId[:8] + "test@test.com",
Password: "test",
OrgID: org.ID,
GroupID: group.ID,
},
true,
)
}
type TestAvailableIntegrationsRepo struct{}
func (t *TestAvailableIntegrationsRepo) list(

View File

@@ -25,12 +25,12 @@ import (
type LogParsingPipelineController struct {
Repo
GetIntegrationPipelines func(context.Context) ([]pipelinetypes.GettablePipeline, *model.ApiError)
GetIntegrationPipelines func(context.Context, string) ([]pipelinetypes.GettablePipeline, *model.ApiError)
}
func NewLogParsingPipelinesController(
sqlStore sqlstore.SQLStore,
getIntegrationPipelines func(context.Context) ([]pipelinetypes.GettablePipeline, *model.ApiError),
getIntegrationPipelines func(context.Context, string) ([]pipelinetypes.GettablePipeline, *model.ApiError),
) (*LogParsingPipelineController, error) {
repo := NewRepo(sqlStore)
return &LogParsingPipelineController{
@@ -164,7 +164,7 @@ func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
result = savedPipelines
}
integrationPipelines, apiErr := ic.GetIntegrationPipelines(ctx)
integrationPipelines, apiErr := ic.GetIntegrationPipelines(ctx, defaultOrgID)
if apiErr != nil {
return nil, model.WrapApiError(
apiErr, "could not get pipelines for installed integrations",

View File

@@ -131,9 +131,11 @@ func getOperators(ops []pipelinetypes.PipelineOperator) ([]pipelinetypes.Pipelin
)
}
operator.If = fmt.Sprintf(
`%s && %s matches "^\\s*{.*}\\s*$"`, parseFromNotNilCheck, operator.ParseFrom,
`%s && (
(typeOf(%s) == "string" && %s matches "^\\s*{.*}\\s*$" ) ||
typeOf(%s) == "map[string]any"
)`, parseFromNotNilCheck, operator.ParseFrom, operator.ParseFrom, operator.ParseFrom,
)
} else if operator.Type == "add" {
if strings.HasPrefix(operator.Value, "EXPR(") && strings.HasSuffix(operator.Value, ")") {
expression := strings.TrimSuffix(strings.TrimPrefix(operator.Value, "EXPR("), ")")
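To make the widened json_parser condition above concrete, a sketch that reproduces the Sprintf with an assumed ParseFrom of body and a not-nil check of body != nil (both illustrative values, not taken from a real pipeline):

// a minimal sketch of the condition now generated for a json_parser operator
func exampleJSONParserCondition() string {
	parseFromNotNilCheck := `body != nil` // assumed; built elsewhere in this package
	parseFrom := "body"                   // assumed ParseFrom value
	// the operator now fires for string bodies that look like JSON objects as well as
	// for bodies already parsed into a map, instead of JSON-looking strings only
	return fmt.Sprintf(
		`%s && (
  (typeOf(%s) == "string" && %s matches "^\\s*{.*}\\s*$" ) ||
  typeOf(%s) == "map[string]any"
)`, parseFromNotNilCheck, parseFrom, parseFrom, parseFrom,
	)
}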

View File

@@ -646,7 +646,7 @@ func TestMembershipOpInProcessorFieldExpressions(t *testing.T) {
require := require.New(t)
testLogs := []model.SignozLog{
makeTestSignozLog("test log", map[string]interface{}{
makeTestSignozLog("test log", map[string]any{
"http.method": "GET",
"order.products": `{"ids": ["pid0", "pid1"]}`,
}),

View File

@@ -719,6 +719,21 @@ func parseFilterAttributeKeyRequest(r *http.Request) (*v3.FilterAttributeKeyRequ
aggregateOperator := v3.AggregateOperator(r.URL.Query().Get("aggregateOperator"))
aggregateAttribute := r.URL.Query().Get("aggregateAttribute")
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
tagType := v3.TagType(r.URL.Query().Get("tagType"))
// an empty string is a valid tagType, i.e. retrieve all attributes
if tagType != "" {
// if tagType carries an invalid value (e.g. the literal "undefined" sent by the frontend),
// fall back to an empty string instead of failing the request. Ideally we would reject the
// request, but we keep the lenient behaviour for backward compatibility.
if err := tagType.Validate(); err != nil {
// if the tagType is invalid, set it to empty string
tagType = ""
}
}
if err != nil {
limit = 50
}
@@ -739,6 +754,7 @@ func parseFilterAttributeKeyRequest(r *http.Request) (*v3.FilterAttributeKeyRequ
AggregateAttribute: aggregateAttribute,
Limit: limit,
SearchText: r.URL.Query().Get("searchText"),
TagType: tagType,
}
return &req, nil
}
@@ -861,7 +877,7 @@ func chTransformQuery(query string, variables map[string]interface{}) {
transformer := chVariables.NewQueryTransformer(query, varsForTransform)
transformedQuery, err := transformer.Transform()
if err != nil {
zap.L().Warn("failed to transform clickhouse query", zap.Error(err))
zap.L().Warn("failed to transform clickhouse query", zap.String("query", query), zap.Error(err))
}
zap.L().Info("transformed clickhouse query", zap.String("transformedQuery", transformedQuery), zap.String("originalQuery", query))
}
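A minimal sketch of the fallback behaviour added to parseFilterAttributeKeyRequest, mirroring the query strings exercised by the tests below; net/http/httptest and fmt imports are assumed.

// illustrative only: an invalid tagType is silently dropped rather than rejected
req := httptest.NewRequest(http.MethodGet,
	"/?aggregateOperator=avg&dataSource=traces&tagType=invalid", nil)
parsed, err := parseFilterAttributeKeyRequest(req)
if err == nil {
	fmt.Println(parsed.TagType == "", parsed.Limit) // true 50: tagType dropped, default limit applied
}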

View File

@@ -112,6 +112,7 @@ func TestParseFilterAttributeKeyRequest(t *testing.T) {
expectedSearchText string
expectErr bool
errMsg string
expectedTagType v3.TagType
}{
{
desc: "valid operator and data source",
@@ -168,6 +169,38 @@ func TestParseFilterAttributeKeyRequest(t *testing.T) {
expectedDataSource: v3.DataSourceTraces,
expectedLimit: 50,
},
{
desc: "invalid tag type",
queryString: "aggregateOperator=avg&dataSource=traces&tagType=invalid",
expectedOperator: v3.AggregateOperatorAvg,
expectedDataSource: v3.DataSourceTraces,
expectedTagType: "",
expectedLimit: 50,
},
{
desc: "valid tag type",
queryString: "aggregateOperator=avg&dataSource=traces&tagType=resource",
expectedOperator: v3.AggregateOperatorAvg,
expectedDataSource: v3.DataSourceTraces,
expectedTagType: v3.TagTypeResource,
expectedLimit: 50,
},
{
desc: "valid tag type",
queryString: "aggregateOperator=avg&dataSource=traces&tagType=scope",
expectedOperator: v3.AggregateOperatorAvg,
expectedDataSource: v3.DataSourceTraces,
expectedTagType: v3.TagTypeInstrumentationScope,
expectedLimit: 50,
},
{
desc: "valid tag type",
queryString: "aggregateOperator=avg&dataSource=traces&tagType=tag",
expectedOperator: v3.AggregateOperatorAvg,
expectedDataSource: v3.DataSourceTraces,
expectedTagType: v3.TagTypeTag,
expectedLimit: 50,
},
}
for _, reqCase := range reqCases {

View File

@@ -439,7 +439,7 @@ func RegisterFirstUser(ctx context.Context, req *RegisterRequest) (*types.User,
}
user := &types.User{
ID: uuid.NewString(),
ID: uuid.New().String(),
Name: req.Name,
Email: req.Email,
Password: hash,
@@ -519,7 +519,7 @@ func RegisterInvitedUser(ctx context.Context, req *RegisterRequest, nopassword b
}
user := &types.User{
ID: uuid.NewString(),
ID: uuid.New().String(),
Name: req.Name,
Email: req.Email,
Password: hash,

View File

@@ -3,6 +3,7 @@ package auth
import (
"context"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/dao"
"github.com/SigNoz/signoz/pkg/types"
@@ -51,7 +52,7 @@ func InitAuthCache(ctx context.Context) error {
func GetUserFromReqContext(ctx context.Context) (*types.GettableUser, error) {
claims, ok := authtypes.ClaimsFromContext(ctx)
if !ok {
return nil, errors.New("no claims found in context")
return nil, errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "no claims found in context")
}
user := &types.GettableUser{

View File

@@ -248,6 +248,7 @@ func (q TagType) Validate() error {
type FilterAttributeKeyRequest struct {
DataSource DataSource `json:"dataSource"`
AggregateOperator AggregateOperator `json:"aggregateOperator"`
TagType TagType `json:"tagType"`
AggregateAttribute string `json:"aggregateAttribute"`
SearchText string `json:"searchText"`
Limit int `json:"limit"`

View File

@@ -35,7 +35,7 @@ func TestAWSIntegrationAccountLifecycle(t *testing.T) {
)
// Should be able to generate a connection url from UI - initializing an integration account
testAccountConfig := cloudintegrations.AccountConfig{
testAccountConfig := types.AccountConfig{
EnabledRegions: []string{"us-east-1", "us-east-2"},
}
connectionUrlResp := testbed.GenerateConnectionUrlFromQS(
@@ -65,8 +65,8 @@ func TestAWSIntegrationAccountLifecycle(t *testing.T) {
testAWSAccountId := "4563215233"
agentCheckInResp := testbed.CheckInAsAgentWithQS(
"aws", cloudintegrations.AgentCheckInRequest{
AccountId: testAccountId,
CloudAccountId: testAWSAccountId,
ID: testAccountId,
AccountID: testAWSAccountId,
},
)
require.Equal(testAccountId, agentCheckInResp.AccountId)
@@ -91,20 +91,20 @@ func TestAWSIntegrationAccountLifecycle(t *testing.T) {
require.Equal(testAWSAccountId, accountsListResp2.Accounts[0].CloudAccountId)
// Should be able to update account config from UI
testAccountConfig2 := cloudintegrations.AccountConfig{
testAccountConfig2 := types.AccountConfig{
EnabledRegions: []string{"us-east-2", "us-west-1"},
}
latestAccount := testbed.UpdateAccountConfigWithQS(
"aws", testAccountId, testAccountConfig2,
)
require.Equal(testAccountId, latestAccount.Id)
require.Equal(testAccountId, latestAccount.ID.StringValue())
require.Equal(testAccountConfig2, *latestAccount.Config)
// The agent should now receive latest account config.
agentCheckInResp1 := testbed.CheckInAsAgentWithQS(
"aws", cloudintegrations.AgentCheckInRequest{
AccountId: testAccountId,
CloudAccountId: testAWSAccountId,
ID: testAccountId,
AccountID: testAWSAccountId,
},
)
require.Equal(testAccountId, agentCheckInResp1.AccountId)
@@ -114,14 +114,14 @@ func TestAWSIntegrationAccountLifecycle(t *testing.T) {
// Should be able to disconnect/remove account from UI.
tsBeforeDisconnect := time.Now()
latestAccount = testbed.DisconnectAccountWithQS("aws", testAccountId)
require.Equal(testAccountId, latestAccount.Id)
require.Equal(testAccountId, latestAccount.ID.StringValue())
require.LessOrEqual(tsBeforeDisconnect, *latestAccount.RemovedAt)
// The agent should receive the disconnected status in account config post disconnection
agentCheckInResp2 := testbed.CheckInAsAgentWithQS(
"aws", cloudintegrations.AgentCheckInRequest{
AccountId: testAccountId,
CloudAccountId: testAWSAccountId,
ID: testAccountId,
AccountID: testAWSAccountId,
},
)
require.Equal(testAccountId, agentCheckInResp2.AccountId)
@@ -157,13 +157,13 @@ func TestAWSIntegrationServices(t *testing.T) {
testAWSAccountId := "389389489489"
testbed.CheckInAsAgentWithQS(
"aws", cloudintegrations.AgentCheckInRequest{
AccountId: testAccountId,
CloudAccountId: testAWSAccountId,
ID: testAccountId,
AccountID: testAWSAccountId,
},
)
testSvcConfig := cloudintegrations.CloudServiceConfig{
Metrics: &cloudintegrations.CloudServiceMetricsConfig{
testSvcConfig := types.CloudServiceConfig{
Metrics: &types.CloudServiceMetricsConfig{
Enabled: true,
},
}
@@ -199,7 +199,7 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
testbed := NewCloudIntegrationsTestBed(t, nil)
// configure a connected account
testAccountConfig := cloudintegrations.AccountConfig{
testAccountConfig := types.AccountConfig{
EnabledRegions: []string{"us-east-1", "us-east-2"},
}
connectionUrlResp := testbed.GenerateConnectionUrlFromQS(
@@ -218,8 +218,8 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
testAWSAccountId := "389389489489"
checkinResp := testbed.CheckInAsAgentWithQS(
"aws", cloudintegrations.AgentCheckInRequest{
AccountId: testAccountId,
CloudAccountId: testAWSAccountId,
ID: testAccountId,
AccountID: testAWSAccountId,
},
)
@@ -237,14 +237,14 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
// helper
setServiceConfig := func(svcId string, metricsEnabled bool, logsEnabled bool) {
testSvcConfig := cloudintegrations.CloudServiceConfig{}
testSvcConfig := types.CloudServiceConfig{}
if metricsEnabled {
testSvcConfig.Metrics = &cloudintegrations.CloudServiceMetricsConfig{
testSvcConfig.Metrics = &types.CloudServiceMetricsConfig{
Enabled: metricsEnabled,
}
}
if logsEnabled {
testSvcConfig.Logs = &cloudintegrations.CloudServiceLogsConfig{
testSvcConfig.Logs = &types.CloudServiceLogsConfig{
Enabled: logsEnabled,
}
}
@@ -262,8 +262,8 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
checkinResp = testbed.CheckInAsAgentWithQS(
"aws", cloudintegrations.AgentCheckInRequest{
AccountId: testAccountId,
CloudAccountId: testAWSAccountId,
ID: testAccountId,
AccountID: testAWSAccountId,
},
)
@@ -292,13 +292,13 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
require.True(strings.HasPrefix(logGroupPrefixes[0], "/aws/rds"))
// change regions and update service configs and validate config changes for agent
testAccountConfig2 := cloudintegrations.AccountConfig{
testAccountConfig2 := types.AccountConfig{
EnabledRegions: []string{"us-east-2", "us-west-1"},
}
latestAccount := testbed.UpdateAccountConfigWithQS(
"aws", testAccountId, testAccountConfig2,
)
require.Equal(testAccountId, latestAccount.Id)
require.Equal(testAccountId, latestAccount.ID.StringValue())
require.Equal(testAccountConfig2, *latestAccount.Config)
// disable metrics for one and logs for the other.
@@ -308,8 +308,8 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
checkinResp = testbed.CheckInAsAgentWithQS(
"aws", cloudintegrations.AgentCheckInRequest{
AccountId: testAccountId,
CloudAccountId: testAWSAccountId,
ID: testAccountId,
AccountID: testAWSAccountId,
},
)
require.Equal(testAccountId, checkinResp.AccountId)
@@ -453,8 +453,8 @@ func (tb *CloudIntegrationsTestBed) CheckInAsAgentWithQS(
}
func (tb *CloudIntegrationsTestBed) UpdateAccountConfigWithQS(
cloudProvider string, accountId string, newConfig cloudintegrations.AccountConfig,
) *cloudintegrations.AccountRecord {
cloudProvider string, accountId string, newConfig types.AccountConfig,
) *types.CloudIntegration {
respDataJson := tb.RequestQS(
fmt.Sprintf(
"/api/v1/cloud-integrations/%s/accounts/%s/config",
@@ -464,7 +464,7 @@ func (tb *CloudIntegrationsTestBed) UpdateAccountConfigWithQS(
},
)
var resp cloudintegrations.AccountRecord
var resp types.CloudIntegration
err := json.Unmarshal(respDataJson, &resp)
if err != nil {
tb.t.Fatalf("could not unmarshal apiResponse.Data json into Account")
@@ -475,7 +475,7 @@ func (tb *CloudIntegrationsTestBed) UpdateAccountConfigWithQS(
func (tb *CloudIntegrationsTestBed) DisconnectAccountWithQS(
cloudProvider string, accountId string,
) *cloudintegrations.AccountRecord {
) *types.CloudIntegration {
respDataJson := tb.RequestQS(
fmt.Sprintf(
"/api/v1/cloud-integrations/%s/accounts/%s/disconnect",
@@ -483,7 +483,7 @@ func (tb *CloudIntegrationsTestBed) DisconnectAccountWithQS(
), map[string]any{},
)
var resp cloudintegrations.AccountRecord
var resp types.CloudIntegration
err := json.Unmarshal(respDataJson, &resp)
if err != nil {
tb.t.Fatalf("could not unmarshal apiResponse.Data json into Account")
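As a quick reference for the renamed check-in fields above (AccountId becomes ID, CloudAccountId becomes AccountID), a sketch of the new request shape with placeholder ids:

// illustrative only: new field names on AgentCheckInRequest
checkinResp := testbed.CheckInAsAgentWithQS("aws", cloudintegrations.AgentCheckInRequest{
	ID:        "00000000-0000-0000-0000-000000000000", // placeholder account record id
	AccountID: "123456789012",                         // placeholder AWS account id
})
_ = checkinResp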

View File

@@ -166,6 +166,7 @@ func createTestUser() (*types.User, *model.ApiError) {
auth.InitAuthCache(ctx)
userId := uuid.NewString()
return dao.DB().CreateUser(
ctx,
&types.User{

View File

@@ -48,10 +48,15 @@ func NewTestSqliteDB(t *testing.T) (sqlStore sqlstore.SQLStore, testDBFilePath s
sqlmigration.NewModifyDatetimeFactory(),
sqlmigration.NewModifyOrgDomainFactory(),
sqlmigration.NewUpdateOrganizationFactory(sqlStore),
sqlmigration.NewAddAlertmanagerFactory(sqlStore),
sqlmigration.NewUpdateDashboardAndSavedViewsFactory(sqlStore),
sqlmigration.NewUpdatePatAndOrgDomainsFactory(sqlStore),
sqlmigration.NewUpdatePipelines(sqlStore),
sqlmigration.NewDropLicensesSitesFactory(sqlStore),
sqlmigration.NewUpdateInvitesFactory(sqlStore),
sqlmigration.NewUpdatePatFactory(sqlStore),
sqlmigration.NewAddVirtualFieldsFactory(),
sqlmigration.NewUpdateIntegrationsFactory(sqlStore),
),
)
if err != nil {

View File

@@ -70,6 +70,7 @@ func NewSQLMigrationProviderFactories(sqlstore sqlstore.SQLStore) factory.NamedM
sqlmigration.NewUpdateApdexTtlFactory(sqlstore),
sqlmigration.NewUpdateResetPasswordFactory(sqlstore),
sqlmigration.NewAddVirtualFieldsFactory(),
sqlmigration.NewUpdateIntegrationsFactory(sqlstore),
)
}

View File

@@ -0,0 +1,441 @@
package sqlmigration
import (
"context"
"database/sql"
"time"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
"go.uber.org/zap"
)
type updateIntegrations struct {
store sqlstore.SQLStore
}
func NewUpdateIntegrationsFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(factory.MustNewName("update_integrations"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newUpdateIntegrations(ctx, ps, c, sqlstore)
})
}
func newUpdateIntegrations(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
return &updateIntegrations{
store: store,
}, nil
}
func (migration *updateIntegrations) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
type existingInstalledIntegration struct {
bun.BaseModel `bun:"table:integrations_installed"`
IntegrationID string `bun:"integration_id,pk,type:text"`
ConfigJSON string `bun:"config_json,type:text"`
InstalledAt time.Time `bun:"installed_at,default:current_timestamp"`
}
type newInstalledIntegration struct {
bun.BaseModel `bun:"table:installed_integration"`
types.Identifiable
Type string `json:"type" bun:"type,type:text,unique:org_id_type"`
Config string `json:"config" bun:"config,type:text"`
InstalledAt time.Time `json:"installed_at" bun:"installed_at,default:current_timestamp"`
OrgID string `json:"org_id" bun:"org_id,type:text,unique:org_id_type"`
}
type existingCloudIntegration struct {
bun.BaseModel `bun:"table:cloud_integrations_accounts"`
CloudProvider string `bun:"cloud_provider,type:text,unique:cloud_provider_id"`
ID string `bun:"id,type:text,notnull,unique:cloud_provider_id"`
ConfigJSON string `bun:"config_json,type:text"`
CloudAccountID string `bun:"cloud_account_id,type:text"`
LastAgentReportJSON string `bun:"last_agent_report_json,type:text"`
CreatedAt time.Time `bun:"created_at,notnull,default:current_timestamp"`
RemovedAt *time.Time `bun:"removed_at,type:timestamp"`
}
type newCloudIntegration struct {
bun.BaseModel `bun:"table:cloud_integration"`
types.Identifiable
types.TimeAuditable
Provider string `json:"provider" bun:"provider,type:text"`
Config string `json:"config" bun:"config,type:text"`
AccountID string `json:"account_id" bun:"account_id,type:text"`
LastAgentReport string `json:"last_agent_report" bun:"last_agent_report,type:text"`
RemovedAt *time.Time `json:"removed_at" bun:"removed_at,type:timestamp"`
OrgID string `json:"org_id" bun:"org_id,type:text"`
}
type existingCloudIntegrationService struct {
bun.BaseModel `bun:"table:cloud_integrations_service_configs,alias:c1"`
CloudProvider string `bun:"cloud_provider,type:text,notnull,unique:service_cloud_provider_account"`
CloudAccountID string `bun:"cloud_account_id,type:text,notnull,unique:service_cloud_provider_account"`
ServiceID string `bun:"service_id,type:text,notnull,unique:service_cloud_provider_account"`
ConfigJSON string `bun:"config_json,type:text"`
CreatedAt time.Time `bun:"created_at,default:current_timestamp"`
}
type newCloudIntegrationService struct {
bun.BaseModel `bun:"table:cloud_integration_service,alias:cis"`
types.Identifiable
types.TimeAuditable
Type string `bun:"type,type:text,notnull,unique:cloud_integration_id_type"`
Config string `bun:"config,type:text"`
CloudIntegrationID string `bun:"cloud_integration_id,type:text,notnull,unique:cloud_integration_id_type"`
}
type StorablePersonalAccessToken struct {
bun.BaseModel `bun:"table:personal_access_token"`
types.Identifiable
types.TimeAuditable
OrgID string `json:"orgId" bun:"org_id,type:text,notnull"`
Role string `json:"role" bun:"role,type:text,notnull,default:'ADMIN'"`
UserID string `json:"userId" bun:"user_id,type:text,notnull"`
Token string `json:"token" bun:"token,type:text,notnull,unique"`
Name string `json:"name" bun:"name,type:text,notnull"`
ExpiresAt int64 `json:"expiresAt" bun:"expires_at,notnull,default:0"`
LastUsed int64 `json:"lastUsed" bun:"last_used,notnull,default:0"`
Revoked bool `json:"revoked" bun:"revoked,notnull,default:false"`
UpdatedByUserID string `json:"updatedByUserId" bun:"updated_by_user_id,type:text,notnull,default:''"`
}
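// Up renames and reshapes the installed integration, cloud integration and
// cloud integration service tables, copying existing rows into the new shapes
// under the single org ID, adds a composite unique index on cloud_integration
// (id, provider, org_id), and finally migrates the legacy aws-integration user
// and its PAT. The whole migration is skipped when more than one organization
// exists, and the user migration is skipped when there is no organization.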
func (migration *updateIntegrations) Up(ctx context.Context, db *bun.DB) error {
// begin transaction
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer tx.Rollback()
// don't run the migration if there are multiple org ids
orgIDs := make([]string, 0)
err = migration.store.BunDB().NewSelect().Model((*types.Organization)(nil)).Column("id").Scan(ctx, &orgIDs)
if err != nil {
return err
}
if len(orgIDs) > 1 {
return nil
}
// ---
// installed integrations
// ---
err = migration.
store.
Dialect().
RenameTableAndModifyModel(ctx, tx, new(existingInstalledIntegration), new(newInstalledIntegration), []string{OrgReference}, func(ctx context.Context) error {
existingIntegrations := make([]*existingInstalledIntegration, 0)
err = tx.
NewSelect().
Model(&existingIntegrations).
Scan(ctx)
if err != nil {
if err != sql.ErrNoRows {
return err
}
}
if err == nil && len(existingIntegrations) > 0 {
newIntegrations := migration.
CopyOldIntegrationsToNewIntegrations(tx, orgIDs[0], existingIntegrations)
_, err = tx.
NewInsert().
Model(&newIntegrations).
Exec(ctx)
if err != nil {
return err
}
}
return nil
})
if err != nil {
return err
}
// ---
// cloud integrations
// ---
err = migration.
store.
Dialect().
RenameTableAndModifyModel(ctx, tx, new(existingCloudIntegration), new(newCloudIntegration), []string{OrgReference}, func(ctx context.Context) error {
existingIntegrations := make([]*existingCloudIntegration, 0)
err = tx.
NewSelect().
Model(&existingIntegrations).
Where("removed_at IS NULL"). // we will only copy the accounts that are not removed
Scan(ctx)
if err != nil {
if err != sql.ErrNoRows {
return err
}
}
if err == nil && len(existingIntegrations) > 0 {
newIntegrations := migration.
CopyOldCloudIntegrationsToNewCloudIntegrations(tx, orgIDs[0], existingIntegrations)
_, err = tx.
NewInsert().
Model(&newIntegrations).
Exec(ctx)
if err != nil {
return err
}
}
return nil
})
if err != nil {
return err
}
// add unique constraint to cloud_integration table
_, err = tx.ExecContext(ctx, `CREATE UNIQUE INDEX IF NOT EXISTS unique_cloud_integration ON cloud_integration (id, provider, org_id)`)
if err != nil {
return err
}
// ---
// cloud integration service
// ---
err = migration.
store.
Dialect().
RenameTableAndModifyModel(ctx, tx, new(existingCloudIntegrationService), new(newCloudIntegrationService), []string{CloudIntegrationReference}, func(ctx context.Context) error {
existingServices := make([]*existingCloudIntegrationService, 0)
// there is only one service config per provider, account ID, and type,
// so there won't be any duplicates;
// these are enabled as soon as the integration for the account is enabled
err = tx.
NewSelect().
Model(&existingServices).
Scan(ctx)
if err != nil {
if err != sql.ErrNoRows {
return err
}
}
if err == nil && len(existingServices) > 0 {
newServices := migration.
CopyOldCloudIntegrationServicesToNewCloudIntegrationServices(tx, orgIDs[0], existingServices)
if len(newServices) > 0 {
_, err = tx.
NewInsert().
Model(&newServices).
Exec(ctx)
if err != nil {
return err
}
}
}
return nil
})
if err != nil {
return err
}
if len(orgIDs) == 0 {
err = tx.Commit()
if err != nil {
return err
}
return nil
}
// migrate the legacy aws-integration user to a UUID-keyed user
err = migration.copyOldAwsIntegrationUser(tx, orgIDs[0])
if err != nil {
return err
}
err = tx.Commit()
if err != nil {
return err
}
return nil
}
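// Down is a no-op: the rename-and-copy performed in Up is not reversed.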
func (migration *updateIntegrations) Down(ctx context.Context, db *bun.DB) error {
return nil
}
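// CopyOldIntegrationsToNewIntegrations re-keys every legacy installed
// integration under a freshly generated UUID and attaches the given org ID.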
func (migration *updateIntegrations) CopyOldIntegrationsToNewIntegrations(tx bun.IDB, orgID string, existingIntegrations []*existingInstalledIntegration) []*newInstalledIntegration {
newIntegrations := make([]*newInstalledIntegration, 0)
for _, integration := range existingIntegrations {
newIntegrations = append(newIntegrations, &newInstalledIntegration{
Identifiable: types.Identifiable{
ID: valuer.GenerateUUID(),
},
Type: integration.IntegrationID,
Config: integration.ConfigJSON,
InstalledAt: integration.InstalledAt,
OrgID: orgID,
})
}
return newIntegrations
}
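// CopyOldCloudIntegrationsToNewCloudIntegrations maps legacy cloud account rows
// onto the new cloud_integration shape, generating a UUID per row and seeding
// both created_at and updated_at from the legacy created_at value.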
func (migration *updateIntegrations) CopyOldCloudIntegrationsToNewCloudIntegrations(tx bun.IDB, orgID string, existingIntegrations []*existingCloudIntegration) []*newCloudIntegration {
newIntegrations := make([]*newCloudIntegration, 0)
for _, integration := range existingIntegrations {
newIntegrations = append(newIntegrations, &newCloudIntegration{
Identifiable: types.Identifiable{
ID: valuer.GenerateUUID(),
},
TimeAuditable: types.TimeAuditable{
CreatedAt: integration.CreatedAt,
UpdatedAt: integration.CreatedAt,
},
Provider: integration.CloudProvider,
AccountID: integration.CloudAccountID,
Config: integration.ConfigJSON,
LastAgentReport: integration.LastAgentReportJSON,
RemovedAt: integration.RemovedAt,
OrgID: orgID,
})
}
return newIntegrations
}
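// CopyOldCloudIntegrationServicesToNewCloudIntegrationServices looks up the new
// cloud_integration row for each legacy service config (by account ID, provider
// and org) and rewrites the service against its ID; services whose account was
// not migrated are skipped.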
func (migration *updateIntegrations) CopyOldCloudIntegrationServicesToNewCloudIntegrationServices(tx bun.IDB, orgID string, existingServices []*existingCloudIntegrationService) []*newCloudIntegrationService {
newServices := make([]*newCloudIntegrationService, 0)
for _, service := range existingServices {
var cloudIntegrationID string
err := tx.NewSelect().
Model((*newCloudIntegration)(nil)).
Column("id").
Where("account_id = ?", service.CloudAccountID).
Where("provider = ?", service.CloudProvider).
Where("org_id = ?", orgID).
Scan(context.Background(), &cloudIntegrationID)
if err != nil {
if err == sql.ErrNoRows {
continue
}
zap.L().Error("failed to get cloud integration id", zap.Error(err))
return nil
}
newServices = append(newServices, &newCloudIntegrationService{
Identifiable: types.Identifiable{
ID: valuer.GenerateUUID(),
},
TimeAuditable: types.TimeAuditable{
CreatedAt: service.CreatedAt,
UpdatedAt: service.CreatedAt,
},
Type: service.ServiceID,
Config: service.ConfigJSON,
CloudIntegrationID: cloudIntegrationID,
})
}
return newServices
}
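// copyOldAwsIntegrationUser replaces the legacy aws-integration@signoz.io user,
// keyed by a plain string ID, with a UUID-keyed user and re-creates its
// personal access token against the new user ID. Nothing is done if the user
// does not exist or already has a UUID ID; if the user has no unrevoked PAT,
// the old user row is simply deleted.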
func (migration *updateIntegrations) copyOldAwsIntegrationUser(tx bun.IDB, orgID string) error {
user := &types.User{}
err := tx.NewSelect().Model(user).Where("email = ?", "aws-integration@signoz.io").Scan(context.Background())
if err != nil {
if err == sql.ErrNoRows {
return nil
}
return err
}
// nothing to do if the ID is already a UUID
if _, err := uuid.Parse(user.ID); err == nil {
return nil
}
// new user
newUser := &types.User{
ID: uuid.New().String(),
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
OrgID: orgID,
Name: user.Name,
Email: user.Email,
GroupID: user.GroupID,
Password: user.Password,
}
// get the pat for old user
pat := &StorablePersonalAccessToken{}
err = tx.NewSelect().Model(pat).Where("user_id = ? and revoked = false", "aws-integration").Scan(context.Background())
if err != nil {
if err == sql.ErrNoRows {
// delete the old user
_, err = tx.ExecContext(context.Background(), `DELETE FROM users WHERE id = ?`, user.ID)
if err != nil {
return err
}
return nil
}
return err
}
// new pat
newPAT := &StorablePersonalAccessToken{
Identifiable: types.Identifiable{ID: valuer.GenerateUUID()},
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
OrgID: orgID,
UserID: newUser.ID,
Token: pat.Token,
Name: pat.Name,
ExpiresAt: pat.ExpiresAt,
LastUsed: pat.LastUsed,
Revoked: pat.Revoked,
Role: pat.Role,
}
// delete old user
_, err = tx.ExecContext(context.Background(), `DELETE FROM users WHERE id = ?`, user.ID)
if err != nil {
return err
}
// insert the new user
_, err = tx.NewInsert().Model(newUser).Exec(context.Background())
if err != nil {
return err
}
// insert the new pat
_, err = tx.NewInsert().Model(newPAT).Exec(context.Background())
if err != nil {
return err
}
return nil
}

View File

@@ -26,8 +26,9 @@ var (
)
var (
OrgReference = "org"
UserReference = "user"
OrgReference              = "org"
UserReference             = "user"
CloudIntegrationReference = "cloud_integration"
)
func New(

View File

@@ -17,13 +17,15 @@ var (
)
var (
Org = "org"
User = "user"
Org              = "org"
User             = "user"
CloudIntegration = "cloud_integration"
)
var (
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
OrgReference              = `("org_id") REFERENCES "organizations" ("id")`
UserReference             = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
)
type dialect struct {
@@ -202,6 +204,8 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
fkReferences = append(fkReferences, OrgReference)
} else if reference == User && !slices.Contains(fkReferences, UserReference) {
fkReferences = append(fkReferences, UserReference)
} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
fkReferences = append(fkReferences, CloudIntegrationReference)
}
}
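
For orientation, the dialect change above boils down to a name-to-constraint lookup: each reference name passed to RenameTableAndModifyModel contributes one foreign-key clause to the recreated table. A minimal, self-contained sketch of that mapping (the fkReferencesFor helper is hypothetical; the real method also deduplicates with slices.Contains):

package main

import "fmt"

// Each known reference name maps to the DDL fragment appended to the
// recreated table; unknown names are ignored.
var referenceClauses = map[string]string{
	"org":               `("org_id") REFERENCES "organizations" ("id")`,
	"user":              `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`,
	"cloud_integration": `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`,
}

func fkReferencesFor(references []string) []string {
	clauses := make([]string, 0, len(references))
	for _, ref := range references {
		if clause, ok := referenceClauses[ref]; ok {
			clauses = append(clauses, clause)
		}
	}
	return clauses
}

func main() {
	fmt.Println(fkReferencesFor([]string{"org", "cloud_integration"}))
}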

View File

@@ -3,10 +3,13 @@ package telemetrylogs
import (
"context"
"fmt"
"strconv"
"strings"
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/huandu/go-sqlbuilder"
)
@@ -81,9 +84,13 @@ func (c *conditionBuilder) GetColumn(ctx context.Context, key *telemetrytypes.Te
case telemetrytypes.FieldDataTypeBool:
return logsV2Columns["attributes_bool"], nil
}
case telemetrytypes.FieldContextLog:
case telemetrytypes.FieldContextLog, telemetrytypes.FieldContextUnspecified:
col, ok := logsV2Columns[key.Name]
if !ok {
// keys using the body JSON search prefix resolve to the body column
if strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
return logsV2Columns["body"], nil
}
return nil, qbtypes.ErrColumnNotFound
}
return col, nil
@@ -137,6 +144,83 @@ func (c *conditionBuilder) GetTableFieldName(ctx context.Context, key *telemetry
return column.Name, nil
}
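// parseStrValue coerces a string filter value into a concrete type. For the
// purely string operators (contains, regexp, like, ilike and their negations)
// the value is returned unchanged; otherwise bool is tried first, then int64,
// then float64, falling back to string. Note that strconv.ParseBool also
// accepts "1", "t" and "T", so such literals resolve to bool before the
// integer parse is attempted.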
func parseStrValue(valueStr string, operator qbtypes.FilterOperator) (telemetrytypes.FieldDataType, any) {
valueType := telemetrytypes.FieldDataTypeString
// return the value as-is for the following operators,
// since their operands are always strings
if operator == qbtypes.FilterOperatorContains || operator == qbtypes.FilterOperatorNotContains ||
operator == qbtypes.FilterOperatorRegexp || operator == qbtypes.FilterOperatorNotRegexp ||
operator == qbtypes.FilterOperatorLike || operator == qbtypes.FilterOperatorNotLike ||
operator == qbtypes.FilterOperatorILike || operator == qbtypes.FilterOperatorNotILike {
return valueType, valueStr
}
var err error
var parsedValue any
if parsedValue, err = strconv.ParseBool(valueStr); err == nil {
valueType = telemetrytypes.FieldDataTypeBool
} else if parsedValue, err = strconv.ParseInt(valueStr, 10, 64); err == nil {
valueType = telemetrytypes.FieldDataTypeInt64
} else if parsedValue, err = strconv.ParseFloat(valueStr, 64); err == nil {
valueType = telemetrytypes.FieldDataTypeFloat64
} else {
parsedValue = valueStr
valueType = telemetrytypes.FieldDataTypeString
}
return valueType, parsedValue
}
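// inferDataType resolves the data type of a filter value: slices are typed by
// their first element, all Go integer widths collapse to Int64, floats to
// Float64, bools to Bool, and strings are refined via parseStrValue. Keys
// addressing array elements (a "[*]" suffix) get their data type wrapped into
// the corresponding "[]"-prefixed array type.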
func inferDataType(value any, operator qbtypes.FilterOperator, key *telemetrytypes.TelemetryFieldKey) (telemetrytypes.FieldDataType, any) {
// determine whether the value is an int, float, string, or bool
valueType := telemetrytypes.FieldDataTypeUnspecified
switch v := value.(type) {
case []any:
// take the first element and infer the type
if len(v) > 0 {
valueType, _ = inferDataType(v[0], operator, key)
}
return valueType, v
case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64:
valueType = telemetrytypes.FieldDataTypeInt64
case float32, float64:
valueType = telemetrytypes.FieldDataTypeFloat64
case string:
valueType, value = parseStrValue(v, operator)
case bool:
valueType = telemetrytypes.FieldDataTypeBool
}
// keys addressing array elements ("[*]" suffix) carry an array data type
if strings.HasSuffix(key.Name, "[*]") {
valueType = telemetrytypes.FieldDataType{String: valuer.NewString(fmt.Sprintf("[]%s", valueType.StringValue()))}
}
return valueType, value
}
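// GetBodyJSONKey builds the ClickHouse expression used to read a body JSON
// field: the leading "body" segment of the key is dropped, array-typed values
// are read via JSON_QUERY and everything else via JSON_VALUE, with the result
// cast through JSONExtract to the inferred type. Roughly (the exact type
// string comes from CHDataType()), a key body.user.id with the value "42"
// yields JSONExtract(JSON_VALUE(body, '$.user.id'), 'Int64').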
func GetBodyJSONKey(_ context.Context, key *telemetrytypes.TelemetryFieldKey, operator qbtypes.FilterOperator, value any) (string, any) {
dataType, value := inferDataType(value, operator, key)
// body JSON keys carry a leading "body" segment; strip it to get the JSON path
path := strings.Join(strings.Split(key.Name, ".")[1:], ".")
// for array types, extract the value with JSON_QUERY
if dataType == telemetrytypes.FieldDataTypeArrayInt64 ||
dataType == telemetrytypes.FieldDataTypeArrayFloat64 ||
dataType == telemetrytypes.FieldDataTypeArrayString ||
dataType == telemetrytypes.FieldDataTypeArrayBool ||
dataType == telemetrytypes.FieldDataTypeArrayNumber {
return fmt.Sprintf("JSONExtract(JSON_QUERY(body, '$.%s'), '%s')", path, dataType.CHDataType()), value
}
// for all other types, extract the value with JSON_VALUE
return fmt.Sprintf("JSONExtract(JSON_VALUE(body, '$.%s'), '%s')", path, dataType.CHDataType()), value
}
func (c *conditionBuilder) GetCondition(
ctx context.Context,
key *telemetrytypes.TelemetryFieldKey,
@@ -154,6 +238,10 @@ func (c *conditionBuilder) GetCondition(
return "", err
}
if strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
tblFieldName, value = GetBodyJSONKey(ctx, key, operator, value)
}
tblFieldName, value = telemetrytypes.DataTypeCollisionHandledFieldName(key, value, tblFieldName)
// regular operators

Some files were not shown because too many files have changed in this diff.