Compare commits


4 Commits

Author             SHA1        Message                                         Date
Piyush Singariya   c9f5b16c37  chore: restructuring looks ok                   2025-04-21 17:39:19 +05:30
Piyush Singariya   55837dacb5  chore: in progress                              2025-04-18 17:37:50 +05:30
Piyush Singariya   14248968cb  chore: trying restructuring                     2025-04-18 16:26:32 +05:30
Piyush Singariya   a436ae900c  feat: introducing S3 sync as AWS integration    2025-04-16 18:45:36 +05:30
127 changed files with 1342 additions and 5867 deletions
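
To reproduce this comparison locally, a minimal sketch (assuming both commits are fetched, and assuming a436ae900c is the oldest commit in the range so that its parent is the merge base the compare view diffs against):

# list the four commits in the range
git log --oneline a436ae900c~1..c9f5b16c37
# show the per-file change summary (127 files, +1342/-5867)
git diff --stat a436ae900c~1 c9f5b16c37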

View File

@@ -1,7 +1,6 @@
.git
.github
.vscode
.devenv
README.md
deploy
sample-apps

View File

@@ -73,7 +73,7 @@ jobs:
uses: actions/cache@v4
with:
path: frontend/.env
key: enterprise-dotenv-${{ github.sha }}
key: dotenv-${{ github.sha }}
js-build:
uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
needs: prepare
@@ -81,10 +81,10 @@ jobs:
with:
PRIMUS_REF: main
JS_SRC: frontend
JS_INPUT_ARTIFACT_CACHE_KEY: enterprise-dotenv-${{ github.sha }}
JS_INPUT_ARTIFACT_CACHE_KEY: dotenv-${{ github.sha }}
JS_INPUT_ARTIFACT_PATH: frontend/.env
JS_OUTPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
JS_OUTPUT_ARTIFACT_PATH: frontend/build
JS_OUTPUT_ARTIFACT_CACHE_KEY: jsbuild-${{ github.sha }}
JS_OUTPUT_ARTIFACT_PATH: frontend/build
DOCKER_BUILD: false
DOCKER_MANIFEST: false
go-build:
@@ -93,7 +93,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_CACHE_KEY: jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./ee/query-service
GO_BUILD_FLAGS: >-

View File

@@ -64,13 +64,22 @@ jobs:
run: |
mkdir -p frontend
echo 'CI=1' > frontend/.env
echo 'TUNNEL_URL=https://telemetry.staging.signoz.cloud/tunnel' >> frontend/.env
echo 'TUNNEL_DOMAIN=https://telemetry.staging.signoz.cloud' >> frontend/.env
echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' >> frontend/.env
echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env
echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env
- name: cache-dotenv
uses: actions/cache@v4
with:
path: frontend/.env
key: staging-dotenv-${{ github.sha }}
key: dotenv-${{ github.sha }}
js-build:
uses: signoz/primus.workflows/.github/workflows/js-build.yaml@main
needs: prepare
@@ -78,9 +87,9 @@ jobs:
with:
PRIMUS_REF: main
JS_SRC: frontend
JS_INPUT_ARTIFACT_CACHE_KEY: staging-dotenv-${{ github.sha }}
JS_INPUT_ARTIFACT_CACHE_KEY: dotenv-${{ github.sha }}
JS_INPUT_ARTIFACT_PATH: frontend/.env
JS_OUTPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
JS_OUTPUT_ARTIFACT_CACHE_KEY: jsbuild-${{ github.sha }}
JS_OUTPUT_ARTIFACT_PATH: frontend/build
DOCKER_BUILD: false
DOCKER_MANIFEST: false
@@ -90,7 +99,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_INPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_CACHE_KEY: jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./ee/query-service
GO_BUILD_FLAGS: >-

View File

@@ -1,55 +0,0 @@
name: integrationci
on:
pull_request:
types:
- labeled
pull_request_target:
types:
- labeled
jobs:
test:
strategy:
fail-fast: false
matrix:
src:
- bootstrap
sqlstore-provider:
- postgres
- sqlite
clickhouse-version:
- 24.1.2-alpine
- 24.12-alpine
schema-migrator-version:
- v0.111.38
postgres-version:
- 15
if: |
((github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))) && contains(github.event.pull_request.labels.*.name, 'safe-to-integrate')
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@v4
- name: python
uses: actions/setup-python@v5
with:
python-version: 3.13
- name: poetry
run: |
python -m pip install poetry==2.1.2
python -m poetry config virtualenvs.in-project true
cd tests/integration && poetry install --no-root
- name: run
run: |
cd tests/integration && \
poetry run pytest -ra \
--basetemp=./tmp/ \
-vv \
--capture=no \
src/${{matrix.src}} \
--sqlstore-provider ${{matrix.sqlstore-provider}} \
--postgres-version ${{matrix.postgres-version}} \
--clickhouse-version ${{matrix.clickhouse-version}} \
--schema-migrator-version ${{matrix.schema-migrator-version}}

View File

@@ -1,9 +1,9 @@
name: prereleaser
on:
# schedule every wednesday 6:30 AM UTC (12:00 PM IST)
# schedule every wednesday 9:30 AM UTC (3pm IST)
schedule:
- cron: '30 6 * * 3'
- cron: '30 9 * * 3'
# allow manual triggering of the workflow by a maintainer
workflow_dispatch:

.gitignore (vendored): 147 changed lines
View File

@@ -80,153 +80,6 @@ deploy/common/clickhouse/user_scripts/
queries.active
# tmp
**/tmp/**
# .devenv tmp files
.devenv/**/tmp/**
.qodo
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
### Python Patch ###
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
poetry.toml
# ruff
.ruff_cache/
# LSP config files
pyrightconfig.json
# End of https://www.toptal.com/developers/gitignore/api/python

View File

@@ -10,7 +10,7 @@ COMMIT_SHORT_SHA ?= $(shell git rev-parse --short HEAD)
BRANCH_NAME ?= $(subst /,-,$(shell git rev-parse --abbrev-ref HEAD))
VERSION ?= $(BRANCH_NAME)-$(COMMIT_SHORT_SHA)
TIMESTAMP ?= $(shell date -u +"%Y-%m-%dT%H:%M:%SZ")
ARCHS ?= amd64 arm64
ARCHS = amd64 arm64
TARGET_DIR ?= $(shell pwd)/target
ZEUS_URL ?= https://api.signoz.cloud
@@ -23,7 +23,6 @@ GO_BUILD_ARCHS_COMMUNITY = $(addprefix go-build-community-,$(ARCHS))
GO_BUILD_CONTEXT_COMMUNITY = $(SRC)/pkg/query-service
GO_BUILD_LDFLAGS_COMMUNITY = $(GO_BUILD_VERSION_LDFLAGS) -X github.com/SigNoz/signoz/pkg/version.variant=community
GO_BUILD_ARCHS_ENTERPRISE = $(addprefix go-build-enterprise-,$(ARCHS))
GO_BUILD_ARCHS_ENTERPRISE_RACE = $(addprefix go-build-enterprise-race-,$(ARCHS))
GO_BUILD_CONTEXT_ENTERPRISE = $(SRC)/ee/query-service
GO_BUILD_LDFLAGS_ENTERPRISE = $(GO_BUILD_VERSION_LDFLAGS) -X github.com/SigNoz/signoz/pkg/version.variant=enterprise $(GO_BUILD_LDFLAG_ZEUS_URL) $(GO_BUILD_LDFLAG_LICENSE_SIGNOZ_IO)
@@ -120,18 +119,6 @@ $(GO_BUILD_ARCHS_ENTERPRISE): go-build-enterprise-%: $(TARGET_DIR)
CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
fi
.PHONY: go-build-enterprise-race $(GO_BUILD_ARCHS_ENTERPRISE_RACE)
go-build-enterprise-race: ## Builds the go backend server for enterprise with race
go-build-enterprise-race: $(GO_BUILD_ARCHS_ENTERPRISE_RACE)
$(GO_BUILD_ARCHS_ENTERPRISE_RACE): go-build-enterprise-race-%: $(TARGET_DIR)
@mkdir -p $(TARGET_DIR)/$(OS)-$*
@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)"
@if [ $* = "arm64" ]; then \
CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
else \
CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
fi
##############################################################
# js commands
##############################################################
@@ -180,20 +167,3 @@ docker-buildx-enterprise: go-build-enterprise js-build
--platform linux/arm64,linux/amd64 \
--push \
--tag $(DOCKER_REGISTRY_ENTERPRISE):$(VERSION) $(SRC)
##############################################################
# python commands
##############################################################
.PHONY: py-fmt
py-fmt: ## Run black for integration tests
@cd tests/integration && poetry run black .
.PHONY: py-lint
py-lint: ## Run lint for integration tests
@cd tests/integration && poetry run isort .
@cd tests/integration && poetry run autoflake .
@cd tests/integration && poetry run pylint .
.PHONY: py-test
py-test: ## Runs integration tests
@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/

View File

@@ -174,7 +174,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.79.1
image: signoz/signoz:v0.78.1
command:
- --config=/root/config/prometheus.yml
- --use-logs-new-schema=true
@@ -208,7 +208,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.39
image: signoz/signoz-otel-collector:v0.111.38
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -232,7 +232,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.39
image: signoz/signoz-schema-migrator:v0.111.38
deploy:
restart_policy:
condition: on-failure

View File

@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.79.1
image: signoz/signoz:v0.78.1
command:
- --config=/root/config/prometheus.yml
- --use-logs-new-schema=true
@@ -143,7 +143,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.39
image: signoz/signoz-otel-collector:v0.111.38
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -167,7 +167,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.39
image: signoz/signoz-schema-migrator:v0.111.38
deploy:
restart_policy:
condition: on-failure

View File

@@ -177,7 +177,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.79.1}
image: signoz/signoz:${VERSION:-v0.78.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -212,7 +212,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.38}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -238,7 +238,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
container_name: schema-migrator-sync
command:
- sync
@@ -249,7 +249,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
container_name: schema-migrator-async
command:
- async

View File

@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.79.1}
image: signoz/signoz:${VERSION:-v0.78.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -146,7 +146,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.38}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -168,7 +168,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
container_name: schema-migrator-sync
command:
- sync
@@ -180,7 +180,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
container_name: schema-migrator-async
command:
- async

View File

@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.79.1}
image: signoz/signoz:${VERSION:-v0.78.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.39}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.38}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
container_name: schema-migrator-sync
command:
- sync
@@ -178,7 +178,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.39}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.38}
container_name: schema-migrator-async
command:
- async

View File

@@ -1,36 +0,0 @@
FROM golang:1.22-bullseye
ARG OS="linux"
ARG TARGETARCH
ARG ZEUSURL
# This path is important for stacktraces
WORKDIR $GOPATH/src/github.com/signoz/signoz
WORKDIR /root
RUN set -eux; \
apt-get update; \
apt-get install -y --no-install-recommends \
g++ \
gcc \
libc6-dev \
make \
pkg-config \
; \
rm -rf /var/lib/apt/lists/*
COPY go.mod go.sum ./
RUN go mod download
COPY ./ee/ ./ee/
COPY ./pkg/ ./pkg/
COPY ./templates/email /root/templates
COPY Makefile Makefile
RUN TARGET_DIR=/root ARCHS=${TARGETARCH} ZEUS_URL=${ZEUSURL} LICENSE_URL=${ZEUSURL}/api/v1 make go-build-enterprise-race
RUN mv /root/linux-${TARGETARCH}/signoz /root/signoz
RUN chmod 755 /root /root/signoz
ENTRYPOINT ["/root/signoz"]

View File

@@ -153,11 +153,9 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
func (ah *APIHandler) getOrCreateCloudIntegrationUser(
ctx context.Context, orgId string, cloudProvider string,
) (*types.User, *basemodel.ApiError) {
cloudIntegrationUser := fmt.Sprintf("%s-integration", cloudProvider)
email := fmt.Sprintf("%s@signoz.io", cloudIntegrationUser)
cloudIntegrationUserId := fmt.Sprintf("%s-integration", cloudProvider)
// TODO(nitya): there should be orgId here
integrationUserResult, apiErr := ah.AppDao().GetUserByEmail(ctx, email)
integrationUserResult, apiErr := ah.AppDao().GetUser(ctx, cloudIntegrationUserId)
if apiErr != nil {
return nil, basemodel.WrapApiError(apiErr, "couldn't look for integration user")
}
@@ -172,9 +170,9 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
)
newUser := &types.User{
ID: uuid.New().String(),
Name: cloudIntegrationUser,
Email: email,
ID: cloudIntegrationUserId,
Name: fmt.Sprintf("%s integration", cloudProvider),
Email: fmt.Sprintf("%s@signoz.io", cloudIntegrationUserId),
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
},

View File

@@ -5,18 +5,16 @@ import (
"encoding/json"
"fmt"
"net/http"
"slices"
"time"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/ee/types"
eeTypes "github.com/SigNoz/signoz/ee/types"
"github.com/SigNoz/signoz/pkg/errors"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/query-service/auth"
baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
"go.uber.org/zap"
@@ -60,7 +58,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
ah.Respond(w, &pat)
}
func validatePATRequest(req eeTypes.GettablePAT) error {
func validatePATRequest(req types.GettablePAT) error {
if req.Role == "" || (req.Role != baseconstants.ViewerGroup && req.Role != baseconstants.EditorGroup && req.Role != baseconstants.AdminGroup) {
return fmt.Errorf("valid role is required")
}
@@ -76,19 +74,12 @@ func validatePATRequest(req eeTypes.GettablePAT) error {
func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
req := eeTypes.GettablePAT{}
req := types.GettablePAT{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
idStr := mux.Vars(r)["id"]
id, err := valuer.NewUUID(idStr)
if err != nil {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
return
}
user, err := auth.GetUserFromReqContext(r.Context())
if err != nil {
RespondError(w, &model.ApiError{
@@ -98,25 +89,6 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
return
}
//get the pat
existingPAT, paterr := ah.AppDao().GetPATByID(ctx, user.OrgID, id)
if paterr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
return
}
// get the user
createdByUser, usererr := ah.AppDao().GetUser(ctx, existingPAT.UserID)
if usererr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
return
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
return
}
err = validatePATRequest(req)
if err != nil {
RespondError(w, model.BadRequest(err), nil)
@@ -124,6 +96,12 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
}
req.UpdatedByUserID = user.ID
idStr := mux.Vars(r)["id"]
id, err := valuer.NewUUID(idStr)
if err != nil {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
return
}
req.UpdatedAt = time.Now()
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
var apierr basemodel.BaseApiError
@@ -171,25 +149,6 @@ func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
return
}
//get the pat
existingPAT, paterr := ah.AppDao().GetPATByID(ctx, user.OrgID, id)
if paterr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
return
}
// get the user
createdByUser, usererr := ah.AppDao().GetUser(ctx, existingPAT.UserID)
if usererr != nil {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
return
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
return
}
zap.L().Info("Revoke PAT with id", zap.String("id", id.StringValue()))
if apierr := ah.AppDao().RevokePAT(ctx, user.OrgID, id, user.ID); apierr != nil {
RespondError(w, apierr, nil)

View File

@@ -8,6 +8,7 @@ import (
basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
ossTypes "github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
@@ -39,6 +40,7 @@ type ModelDao interface {
UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError
GetPAT(ctx context.Context, pat string) (*types.GettablePAT, basemodel.BaseApiError)
GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError)
GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError)
ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError)
RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError
}

View File

@@ -43,7 +43,7 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (
}
user := &types.User{
ID: uuid.New().String(),
ID: uuid.NewString(),
Name: "",
Email: email,
Password: hash,

View File

@@ -196,3 +196,27 @@ func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID)
return &patWithUser, nil
}
// deprecated
func (m *modelDao) GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError) {
users := []ossTypes.GettableUser{}
if err := m.DB().NewSelect().
Model(&users).
Column("u.id", "u.name", "u.email", "u.password", "u.created_at", "u.profile_picture_url", "u.org_id", "u.group_id").
Join("JOIN personal_access_tokens p ON u.id = p.user_id").
Where("p.token = ?", token).
Where("p.expires_at >= strftime('%s', 'now')").
Where("p.org_id = ?", orgID).
Scan(ctx); err != nil {
return nil, model.InternalError(fmt.Errorf("failed to fetch user from PAT, err: %v", err))
}
if len(users) != 1 {
return nil, &model.ApiError{
Typ: model.ErrorInternal,
Err: fmt.Errorf("found zero or multiple users with same PAT token"),
}
}
return &users[0], nil
}

View File

@@ -17,15 +17,13 @@ var (
)
var (
Org = "org"
User = "user"
CloudIntegration = "cloud_integration"
Org = "org"
User = "user"
)
var (
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
)
type dialect struct {
@@ -213,8 +211,6 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
fkReferences = append(fkReferences, OrgReference)
} else if reference == User && !slices.Contains(fkReferences, UserReference) {
fkReferences = append(fkReferences, UserReference)
} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
fkReferences = append(fkReferences, CloudIntegrationReference)
}
}

go.mod: 6 changed lines
View File

@@ -10,8 +10,7 @@ require (
github.com/ClickHouse/clickhouse-go/v2 v2.30.0
github.com/DATA-DOG/go-sqlmock v1.5.2
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
github.com/SigNoz/signoz-otel-collector v0.111.39
github.com/antlr4-go/antlr/v4 v4.13.1
github.com/SigNoz/signoz-otel-collector v0.111.16
github.com/antonmedv/expr v1.15.3
github.com/cespare/xxhash/v2 v2.3.0
github.com/coreos/go-oidc/v3 v3.11.0
@@ -90,9 +89,10 @@ require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect
github.com/ClickHouse/ch-go v0.63.1 // indirect
github.com/ClickHouse/ch-go v0.61.5 // indirect
github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b // indirect
github.com/andybalholm/brotli v1.1.1 // indirect
github.com/antlr4-go/antlr/v4 v4.13.1 // indirect
github.com/armon/go-metrics v0.4.1 // indirect
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
github.com/aws/aws-sdk-go v1.55.5 // indirect

go.sum: 14 changed lines
View File

@@ -85,8 +85,8 @@ github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mx
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/ClickHouse/ch-go v0.63.1 h1:s2JyZvWLTCSAGdtjMBBmAgQQHMco6pawLJMOXi0FODM=
github.com/ClickHouse/ch-go v0.63.1/go.mod h1:I1kJJCL3WJcBMGe1m+HVK0+nREaG+JOYYBWjrDrF3R0=
github.com/ClickHouse/ch-go v0.61.5 h1:zwR8QbYI0tsMiEcze/uIMK+Tz1D3XZXLdNrlaOpeEI4=
github.com/ClickHouse/ch-go v0.61.5/go.mod h1:s1LJW/F/LcFs5HJnuogFMta50kKDO0lf9zzfrbl0RQg=
github.com/ClickHouse/clickhouse-go/v2 v2.30.0 h1:AG4D/hW39qa58+JHQIFOSnxyL46H6h2lrmGGk17dhFo=
github.com/ClickHouse/clickhouse-go/v2 v2.30.0/go.mod h1:i9ZQAojcayW3RsdCb3YR+n+wC2h65eJsZCscZ1Z1wyo=
github.com/Code-Hex/go-generics-cache v1.5.1 h1:6vhZGc5M7Y/YD8cIUcY8kcuQLB4cHR7U+0KMqAA0KcU=
@@ -100,10 +100,8 @@ github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkbj57eGXx8H3ZJ4zhmQXBnrW523ktj8=
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
github.com/SigNoz/signoz-otel-collector v0.111.39-beta.1 h1:ZpSNrOZBOH2iCJIPeER5X0mfxOe64yP3JRX7FzBNfwY=
github.com/SigNoz/signoz-otel-collector v0.111.39-beta.1/go.mod h1:DCu/D+lqhsPNSGS4IMD+4gn7q06TGzOCKazSy+GURVc=
github.com/SigNoz/signoz-otel-collector v0.111.39 h1:Dl8QqZNAsj2atxP572OzsszPK0XPpd3LLPNPRAUJ5wo=
github.com/SigNoz/signoz-otel-collector v0.111.39/go.mod h1:DCu/D+lqhsPNSGS4IMD+4gn7q06TGzOCKazSy+GURVc=
github.com/SigNoz/signoz-otel-collector v0.111.16 h1:535uKH5Oux+35EsI+L3C6pnAP/Ye0PTCbVizXoL+VqE=
github.com/SigNoz/signoz-otel-collector v0.111.16/go.mod h1:HJ4m0LY1MPsuZmuRF7Ixb+bY8rxgRzI0VXzOedESsjg=
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
@@ -822,8 +820,8 @@ github.com/prometheus/prometheus v0.300.1/go.mod h1:gtTPY/XVyCdqqnjA3NzDMb0/nc5H
github.com/puzpuzpuz/xsync/v3 v3.5.0 h1:i+cMcpEDY1BkNm7lPDkCtE4oElsYLn+EKF8kAu2vXT4=
github.com/puzpuzpuz/xsync/v3 v3.5.0/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA=
github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/redis/go-redis/v9 v9.6.3 h1:8Dr5ygF1QFXRxIH/m3Xg9MMG1rS8YCtAgosrsewT6i0=
github.com/redis/go-redis/v9 v9.6.3/go.mod h1:0C0c6ycQsdpVNQpxb1njEQIqkx5UcsM8FJCQLgE9+RA=
github.com/redis/go-redis/v9 v9.6.1 h1:HHDteefn6ZkTtY5fGUE8tj8uy85AHk6zP7CpzIAM0y4=
github.com/redis/go-redis/v9 v9.6.1/go.mod h1:0C0c6ycQsdpVNQpxb1njEQIqkx5UcsM8FJCQLgE9+RA=
github.com/rhnvrm/simples3 v0.6.1/go.mod h1:Y+3vYm2V7Y4VijFoJHHTrja6OgPrJ2cBti8dPGkC3sA=
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=

View File

@@ -104,7 +104,7 @@ fullText
* ...
*/
functionCall
: (HAS | HASANY | HASALL) LPAREN functionParamList RPAREN
: (HAS | HASANY | HASALL | HASNONE) LPAREN functionParamList RPAREN
;
// Function parameters can be keys, single scalar values, or arrays
@@ -182,6 +182,7 @@ OR : [Oo][Rr] ;
HAS : [Hh][Aa][Ss] ;
HASANY : [Hh][Aa][Ss][Aa][Nn][Yy] ;
HASALL : [Hh][Aa][Ss][Aa][Ll][Ll] ;
HASNONE : [Hh][Aa][Ss][Nn][Oo][Nn][Ee] ;
// Potential boolean constants
BOOL
@@ -204,7 +205,7 @@ QUOTED_TEXT
// Keys can have letters, digits, underscores, dots, and bracket pairs
// e.g. service.name, service.namespace, db.queries[].query_duration
KEY
: [a-zA-Z0-9_] [a-zA-Z0-9_.*[\]]*
: [a-zA-Z0-9_] [a-zA-Z0-9_.[\]]*
;
// Ignore whitespace
@@ -217,4 +218,4 @@ fragment DIGIT
: [0-9]
;
FREETEXT : (~[ \t\r\n=()'"<>!,[\]])+ ;
FREETEXT : (~[ \t\r\n=()'"<>![\]])+ ;

View File

@@ -25,25 +25,6 @@ type postableAlert struct {
Receivers []string `json:"receivers"`
}
func (pa *postableAlert) MarshalJSON() ([]byte, error) {
// Marshal the embedded PostableAlert to get its JSON representation.
alertJSON, err := json.Marshal(pa.PostableAlert)
if err != nil {
return nil, err
}
// Unmarshal that JSON into a map so we can add extra fields.
var m map[string]interface{}
if err := json.Unmarshal(alertJSON, &m); err != nil {
return nil, err
}
// Add the Receivers field.
m["receivers"] = pa.Receivers
return json.Marshal(m)
}
const (
alertsPath string = "/v1/alerts"
routesPath string = "/v1/routes"

View File

@@ -1,35 +0,0 @@
package legacyalertmanager
import (
"encoding/json"
"testing"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/prometheus/alertmanager/api/v2/models"
"github.com/stretchr/testify/assert"
)
func TestProvider_TestAlert(t *testing.T) {
pa := &postableAlert{
PostableAlert: &alertmanagertypes.PostableAlert{
Alert: models.Alert{
Labels: models.LabelSet{
"alertname": "test",
},
GeneratorURL: "http://localhost:9090/graph?g0.expr=up&g0.tab=1",
},
Annotations: models.LabelSet{
"summary": "test",
},
},
Receivers: []string{"receiver1", "receiver2"},
}
body, err := json.Marshal(pa)
if err != nil {
t.Fatalf("failed to marshal postable alert: %v", err)
}
assert.Contains(t, string(body), "receiver1")
assert.Contains(t, string(body), "receiver2")
}

File diff suppressed because one or more lines are too long

View File

@@ -25,12 +25,13 @@ OR=24
HAS=25
HASANY=26
HASALL=27
BOOL=28
NUMBER=29
QUOTED_TEXT=30
KEY=31
WS=32
FREETEXT=33
HASNONE=28
BOOL=29
NUMBER=30
QUOTED_TEXT=31
KEY=32
WS=33
FREETEXT=34
'('=1
')'=2
'['=3

File diff suppressed because one or more lines are too long

View File

@@ -25,12 +25,13 @@ OR=24
HAS=25
HASANY=26
HASALL=27
BOOL=28
NUMBER=29
QUOTED_TEXT=30
KEY=31
WS=32
FREETEXT=33
HASNONE=28
BOOL=29
NUMBER=30
QUOTED_TEXT=31
KEY=32
WS=33
FREETEXT=34
'('=1
')'=2
'['=3

View File

@@ -50,146 +50,150 @@ func filterquerylexerLexerInit() {
"", "LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
"HAS", "HASANY", "HASALL", "BOOL", "NUMBER", "QUOTED_TEXT", "KEY", "WS",
"FREETEXT",
"HAS", "HASANY", "HASALL", "HASNONE", "BOOL", "NUMBER", "QUOTED_TEXT",
"KEY", "WS", "FREETEXT",
}
staticData.RuleNames = []string{
"LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
"HAS", "HASANY", "HASALL", "BOOL", "NUMBER", "QUOTED_TEXT", "KEY", "WS",
"DIGIT", "FREETEXT",
"HAS", "HASANY", "HASALL", "HASNONE", "BOOL", "NUMBER", "QUOTED_TEXT",
"KEY", "WS", "DIGIT", "FREETEXT",
}
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 0, 33, 270, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
4, 0, 34, 280, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2,
10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15,
7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7,
20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25,
2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2,
31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1,
2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 83, 8, 5, 1, 6, 1, 6,
1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1,
11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13,
1, 13, 4, 13, 110, 8, 13, 11, 13, 12, 13, 111, 1, 13, 1, 13, 1, 13, 1,
13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15,
1, 15, 4, 15, 129, 8, 15, 11, 15, 12, 15, 130, 1, 15, 1, 15, 1, 15, 1,
15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16,
1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 153, 8, 17, 1, 18, 1,
18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19,
1, 19, 1, 19, 1, 19, 3, 19, 170, 8, 19, 1, 20, 1, 20, 1, 20, 1, 21, 1,
21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 24,
1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1,
26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27,
1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 3, 27, 213, 8, 27, 1, 28, 4, 28, 216,
8, 28, 11, 28, 12, 28, 217, 1, 28, 1, 28, 4, 28, 222, 8, 28, 11, 28, 12,
28, 223, 3, 28, 226, 8, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 232, 8,
29, 10, 29, 12, 29, 235, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29,
242, 8, 29, 10, 29, 12, 29, 245, 9, 29, 1, 29, 3, 29, 248, 8, 29, 1, 30,
1, 30, 5, 30, 252, 8, 30, 10, 30, 12, 30, 255, 9, 30, 1, 31, 4, 31, 258,
8, 31, 11, 31, 12, 31, 259, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 4, 33, 267,
8, 33, 11, 33, 12, 33, 268, 0, 0, 34, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11,
31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 1, 0, 1, 0, 1, 1,
1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 85, 8,
5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1,
10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13,
1, 13, 1, 13, 1, 13, 4, 13, 112, 8, 13, 11, 13, 12, 13, 113, 1, 13, 1,
13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15,
1, 15, 1, 15, 1, 15, 4, 15, 131, 8, 15, 11, 15, 12, 15, 132, 1, 15, 1,
15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16,
1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 155, 8,
17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19,
1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 172, 8, 19, 1, 20, 1, 20, 1,
20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23,
1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1,
25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27,
1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1,
28, 1, 28, 1, 28, 1, 28, 1, 28, 3, 28, 223, 8, 28, 1, 29, 4, 29, 226, 8,
29, 11, 29, 12, 29, 227, 1, 29, 1, 29, 4, 29, 232, 8, 29, 11, 29, 12, 29,
233, 3, 29, 236, 8, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 242, 8, 30,
10, 30, 12, 30, 245, 9, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 252,
8, 30, 10, 30, 12, 30, 255, 9, 30, 1, 30, 3, 30, 258, 8, 30, 1, 31, 1,
31, 5, 31, 262, 8, 31, 10, 31, 12, 31, 265, 9, 31, 1, 32, 4, 32, 268, 8,
32, 11, 32, 12, 32, 269, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 4, 34, 277,
8, 34, 11, 34, 12, 34, 278, 0, 0, 35, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11,
6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15,
31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24,
49, 25, 51, 26, 53, 27, 55, 28, 57, 29, 59, 30, 61, 31, 63, 32, 65, 0,
67, 33, 1, 0, 29, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105, 2, 0,
75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 78, 78, 110, 110, 2, 0,
79, 79, 111, 111, 2, 0, 84, 84, 116, 116, 2, 0, 9, 9, 32, 32, 2, 0, 66,
66, 98, 98, 2, 0, 87, 87, 119, 119, 2, 0, 88, 88, 120, 120, 2, 0, 83, 83,
115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0, 80, 80,
112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100,
100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85, 85, 117,
117, 2, 0, 70, 70, 102, 102, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92, 92,
4, 0, 48, 57, 65, 90, 95, 95, 97, 122, 7, 0, 42, 42, 46, 46, 48, 57, 65,
49, 25, 51, 26, 53, 27, 55, 28, 57, 29, 59, 30, 61, 31, 63, 32, 65, 33,
67, 0, 69, 34, 1, 0, 29, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105,
2, 0, 75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 78, 78, 110, 110,
2, 0, 79, 79, 111, 111, 2, 0, 84, 84, 116, 116, 2, 0, 9, 9, 32, 32, 2,
0, 66, 66, 98, 98, 2, 0, 87, 87, 119, 119, 2, 0, 88, 88, 120, 120, 2, 0,
83, 83, 115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0,
80, 80, 112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 65, 65, 97, 97, 2, 0, 68,
68, 100, 100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85,
85, 117, 117, 2, 0, 70, 70, 102, 102, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39,
92, 92, 4, 0, 48, 57, 65, 90, 95, 95, 97, 122, 6, 0, 46, 46, 48, 57, 65,
91, 93, 93, 95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57,
8, 0, 9, 10, 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 285,
0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0,
0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0,
0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0,
0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1,
0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39,
1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0,
47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0,
0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0,
0, 0, 63, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 1, 69, 1, 0, 0, 0, 3, 71, 1, 0,
0, 0, 5, 73, 1, 0, 0, 0, 7, 75, 1, 0, 0, 0, 9, 77, 1, 0, 0, 0, 11, 82,
1, 0, 0, 0, 13, 84, 1, 0, 0, 0, 15, 87, 1, 0, 0, 0, 17, 90, 1, 0, 0, 0,
19, 92, 1, 0, 0, 0, 21, 95, 1, 0, 0, 0, 23, 97, 1, 0, 0, 0, 25, 100, 1,
0, 0, 0, 27, 105, 1, 0, 0, 0, 29, 118, 1, 0, 0, 0, 31, 124, 1, 0, 0, 0,
33, 138, 1, 0, 0, 0, 35, 146, 1, 0, 0, 0, 37, 154, 1, 0, 0, 0, 39, 161,
1, 0, 0, 0, 41, 171, 1, 0, 0, 0, 43, 174, 1, 0, 0, 0, 45, 178, 1, 0, 0,
0, 47, 182, 1, 0, 0, 0, 49, 185, 1, 0, 0, 0, 51, 189, 1, 0, 0, 0, 53, 196,
1, 0, 0, 0, 55, 212, 1, 0, 0, 0, 57, 215, 1, 0, 0, 0, 59, 247, 1, 0, 0,
0, 61, 249, 1, 0, 0, 0, 63, 257, 1, 0, 0, 0, 65, 263, 1, 0, 0, 0, 67, 266,
1, 0, 0, 0, 69, 70, 5, 40, 0, 0, 70, 2, 1, 0, 0, 0, 71, 72, 5, 41, 0, 0,
72, 4, 1, 0, 0, 0, 73, 74, 5, 91, 0, 0, 74, 6, 1, 0, 0, 0, 75, 76, 5, 93,
0, 0, 76, 8, 1, 0, 0, 0, 77, 78, 5, 44, 0, 0, 78, 10, 1, 0, 0, 0, 79, 83,
5, 61, 0, 0, 80, 81, 5, 61, 0, 0, 81, 83, 5, 61, 0, 0, 82, 79, 1, 0, 0,
0, 82, 80, 1, 0, 0, 0, 83, 12, 1, 0, 0, 0, 84, 85, 5, 33, 0, 0, 85, 86,
5, 61, 0, 0, 86, 14, 1, 0, 0, 0, 87, 88, 5, 60, 0, 0, 88, 89, 5, 62, 0,
0, 89, 16, 1, 0, 0, 0, 90, 91, 5, 60, 0, 0, 91, 18, 1, 0, 0, 0, 92, 93,
5, 60, 0, 0, 93, 94, 5, 61, 0, 0, 94, 20, 1, 0, 0, 0, 95, 96, 5, 62, 0,
0, 96, 22, 1, 0, 0, 0, 97, 98, 5, 62, 0, 0, 98, 99, 5, 61, 0, 0, 99, 24,
1, 0, 0, 0, 100, 101, 7, 0, 0, 0, 101, 102, 7, 1, 0, 0, 102, 103, 7, 2,
0, 0, 103, 104, 7, 3, 0, 0, 104, 26, 1, 0, 0, 0, 105, 106, 7, 4, 0, 0,
106, 107, 7, 5, 0, 0, 107, 109, 7, 6, 0, 0, 108, 110, 7, 7, 0, 0, 109,
108, 1, 0, 0, 0, 110, 111, 1, 0, 0, 0, 111, 109, 1, 0, 0, 0, 111, 112,
1, 0, 0, 0, 112, 113, 1, 0, 0, 0, 113, 114, 7, 0, 0, 0, 114, 115, 7, 1,
0, 0, 115, 116, 7, 2, 0, 0, 116, 117, 7, 3, 0, 0, 117, 28, 1, 0, 0, 0,
118, 119, 7, 1, 0, 0, 119, 120, 7, 0, 0, 0, 120, 121, 7, 1, 0, 0, 121,
122, 7, 2, 0, 0, 122, 123, 7, 3, 0, 0, 123, 30, 1, 0, 0, 0, 124, 125, 7,
4, 0, 0, 125, 126, 7, 5, 0, 0, 126, 128, 7, 6, 0, 0, 127, 129, 7, 7, 0,
0, 128, 127, 1, 0, 0, 0, 129, 130, 1, 0, 0, 0, 130, 128, 1, 0, 0, 0, 130,
131, 1, 0, 0, 0, 131, 132, 1, 0, 0, 0, 132, 133, 7, 1, 0, 0, 133, 134,
7, 0, 0, 0, 134, 135, 7, 1, 0, 0, 135, 136, 7, 2, 0, 0, 136, 137, 7, 3,
0, 0, 137, 32, 1, 0, 0, 0, 138, 139, 7, 8, 0, 0, 139, 140, 7, 3, 0, 0,
140, 141, 7, 6, 0, 0, 141, 142, 7, 9, 0, 0, 142, 143, 7, 3, 0, 0, 143,
144, 7, 3, 0, 0, 144, 145, 7, 4, 0, 0, 145, 34, 1, 0, 0, 0, 146, 147, 7,
3, 0, 0, 147, 148, 7, 10, 0, 0, 148, 149, 7, 1, 0, 0, 149, 150, 7, 11,
0, 0, 150, 152, 7, 6, 0, 0, 151, 153, 7, 11, 0, 0, 152, 151, 1, 0, 0, 0,
152, 153, 1, 0, 0, 0, 153, 36, 1, 0, 0, 0, 154, 155, 7, 12, 0, 0, 155,
156, 7, 3, 0, 0, 156, 157, 7, 13, 0, 0, 157, 158, 7, 3, 0, 0, 158, 159,
7, 10, 0, 0, 159, 160, 7, 14, 0, 0, 160, 38, 1, 0, 0, 0, 161, 162, 7, 15,
0, 0, 162, 163, 7, 5, 0, 0, 163, 164, 7, 4, 0, 0, 164, 165, 7, 6, 0, 0,
165, 166, 7, 16, 0, 0, 166, 167, 7, 1, 0, 0, 167, 169, 7, 4, 0, 0, 168,
170, 7, 11, 0, 0, 169, 168, 1, 0, 0, 0, 169, 170, 1, 0, 0, 0, 170, 40,
1, 0, 0, 0, 171, 172, 7, 1, 0, 0, 172, 173, 7, 4, 0, 0, 173, 42, 1, 0,
0, 0, 174, 175, 7, 4, 0, 0, 175, 176, 7, 5, 0, 0, 176, 177, 7, 6, 0, 0,
177, 44, 1, 0, 0, 0, 178, 179, 7, 16, 0, 0, 179, 180, 7, 4, 0, 0, 180,
181, 7, 17, 0, 0, 181, 46, 1, 0, 0, 0, 182, 183, 7, 5, 0, 0, 183, 184,
7, 12, 0, 0, 184, 48, 1, 0, 0, 0, 185, 186, 7, 18, 0, 0, 186, 187, 7, 16,
0, 0, 187, 188, 7, 11, 0, 0, 188, 50, 1, 0, 0, 0, 189, 190, 7, 18, 0, 0,
190, 191, 7, 16, 0, 0, 191, 192, 7, 11, 0, 0, 192, 193, 7, 16, 0, 0, 193,
194, 7, 4, 0, 0, 194, 195, 7, 19, 0, 0, 195, 52, 1, 0, 0, 0, 196, 197,
7, 18, 0, 0, 197, 198, 7, 16, 0, 0, 198, 199, 7, 11, 0, 0, 199, 200, 7,
16, 0, 0, 200, 201, 7, 0, 0, 0, 201, 202, 7, 0, 0, 0, 202, 54, 1, 0, 0,
0, 203, 204, 7, 6, 0, 0, 204, 205, 7, 12, 0, 0, 205, 206, 7, 20, 0, 0,
206, 213, 7, 3, 0, 0, 207, 208, 7, 21, 0, 0, 208, 209, 7, 16, 0, 0, 209,
210, 7, 0, 0, 0, 210, 211, 7, 11, 0, 0, 211, 213, 7, 3, 0, 0, 212, 203,
1, 0, 0, 0, 212, 207, 1, 0, 0, 0, 213, 56, 1, 0, 0, 0, 214, 216, 3, 65,
32, 0, 215, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 215, 1, 0, 0, 0,
217, 218, 1, 0, 0, 0, 218, 225, 1, 0, 0, 0, 219, 221, 5, 46, 0, 0, 220,
222, 3, 65, 32, 0, 221, 220, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 221,
1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 226, 1, 0, 0, 0, 225, 219, 1, 0,
0, 0, 225, 226, 1, 0, 0, 0, 226, 58, 1, 0, 0, 0, 227, 233, 5, 34, 0, 0,
228, 232, 8, 22, 0, 0, 229, 230, 5, 92, 0, 0, 230, 232, 9, 0, 0, 0, 231,
228, 1, 0, 0, 0, 231, 229, 1, 0, 0, 0, 232, 235, 1, 0, 0, 0, 233, 231,
1, 0, 0, 0, 233, 234, 1, 0, 0, 0, 234, 236, 1, 0, 0, 0, 235, 233, 1, 0,
0, 0, 236, 248, 5, 34, 0, 0, 237, 243, 5, 39, 0, 0, 238, 242, 8, 23, 0,
0, 239, 240, 5, 92, 0, 0, 240, 242, 9, 0, 0, 0, 241, 238, 1, 0, 0, 0, 241,
239, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243, 244,
1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 248, 5, 39,
0, 0, 247, 227, 1, 0, 0, 0, 247, 237, 1, 0, 0, 0, 248, 60, 1, 0, 0, 0,
249, 253, 7, 24, 0, 0, 250, 252, 7, 25, 0, 0, 251, 250, 1, 0, 0, 0, 252,
255, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 253, 254, 1, 0, 0, 0, 254, 62, 1,
0, 0, 0, 255, 253, 1, 0, 0, 0, 256, 258, 7, 26, 0, 0, 257, 256, 1, 0, 0,
0, 258, 259, 1, 0, 0, 0, 259, 257, 1, 0, 0, 0, 259, 260, 1, 0, 0, 0, 260,
261, 1, 0, 0, 0, 261, 262, 6, 31, 0, 0, 262, 64, 1, 0, 0, 0, 263, 264,
7, 27, 0, 0, 264, 66, 1, 0, 0, 0, 265, 267, 8, 28, 0, 0, 266, 265, 1, 0,
0, 0, 267, 268, 1, 0, 0, 0, 268, 266, 1, 0, 0, 0, 268, 269, 1, 0, 0, 0,
269, 68, 1, 0, 0, 0, 18, 0, 82, 111, 130, 152, 169, 212, 217, 223, 225,
231, 233, 241, 243, 247, 253, 259, 268, 1, 6, 0, 0,
7, 0, 9, 10, 13, 13, 32, 34, 39, 41, 60, 62, 91, 91, 93, 93, 295, 0, 1,
1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9,
1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0,
17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0,
0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0,
0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0,
0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1,
0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55,
1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0,
63, 1, 0, 0, 0, 0, 65, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 1, 71, 1, 0, 0, 0,
3, 73, 1, 0, 0, 0, 5, 75, 1, 0, 0, 0, 7, 77, 1, 0, 0, 0, 9, 79, 1, 0, 0,
0, 11, 84, 1, 0, 0, 0, 13, 86, 1, 0, 0, 0, 15, 89, 1, 0, 0, 0, 17, 92,
1, 0, 0, 0, 19, 94, 1, 0, 0, 0, 21, 97, 1, 0, 0, 0, 23, 99, 1, 0, 0, 0,
25, 102, 1, 0, 0, 0, 27, 107, 1, 0, 0, 0, 29, 120, 1, 0, 0, 0, 31, 126,
1, 0, 0, 0, 33, 140, 1, 0, 0, 0, 35, 148, 1, 0, 0, 0, 37, 156, 1, 0, 0,
0, 39, 163, 1, 0, 0, 0, 41, 173, 1, 0, 0, 0, 43, 176, 1, 0, 0, 0, 45, 180,
1, 0, 0, 0, 47, 184, 1, 0, 0, 0, 49, 187, 1, 0, 0, 0, 51, 191, 1, 0, 0,
0, 53, 198, 1, 0, 0, 0, 55, 205, 1, 0, 0, 0, 57, 222, 1, 0, 0, 0, 59, 225,
1, 0, 0, 0, 61, 257, 1, 0, 0, 0, 63, 259, 1, 0, 0, 0, 65, 267, 1, 0, 0,
0, 67, 273, 1, 0, 0, 0, 69, 276, 1, 0, 0, 0, 71, 72, 5, 40, 0, 0, 72, 2,
1, 0, 0, 0, 73, 74, 5, 41, 0, 0, 74, 4, 1, 0, 0, 0, 75, 76, 5, 91, 0, 0,
76, 6, 1, 0, 0, 0, 77, 78, 5, 93, 0, 0, 78, 8, 1, 0, 0, 0, 79, 80, 5, 44,
0, 0, 80, 10, 1, 0, 0, 0, 81, 85, 5, 61, 0, 0, 82, 83, 5, 61, 0, 0, 83,
85, 5, 61, 0, 0, 84, 81, 1, 0, 0, 0, 84, 82, 1, 0, 0, 0, 85, 12, 1, 0,
0, 0, 86, 87, 5, 33, 0, 0, 87, 88, 5, 61, 0, 0, 88, 14, 1, 0, 0, 0, 89,
90, 5, 60, 0, 0, 90, 91, 5, 62, 0, 0, 91, 16, 1, 0, 0, 0, 92, 93, 5, 60,
0, 0, 93, 18, 1, 0, 0, 0, 94, 95, 5, 60, 0, 0, 95, 96, 5, 61, 0, 0, 96,
20, 1, 0, 0, 0, 97, 98, 5, 62, 0, 0, 98, 22, 1, 0, 0, 0, 99, 100, 5, 62,
0, 0, 100, 101, 5, 61, 0, 0, 101, 24, 1, 0, 0, 0, 102, 103, 7, 0, 0, 0,
103, 104, 7, 1, 0, 0, 104, 105, 7, 2, 0, 0, 105, 106, 7, 3, 0, 0, 106,
26, 1, 0, 0, 0, 107, 108, 7, 4, 0, 0, 108, 109, 7, 5, 0, 0, 109, 111, 7,
6, 0, 0, 110, 112, 7, 7, 0, 0, 111, 110, 1, 0, 0, 0, 112, 113, 1, 0, 0,
0, 113, 111, 1, 0, 0, 0, 113, 114, 1, 0, 0, 0, 114, 115, 1, 0, 0, 0, 115,
116, 7, 0, 0, 0, 116, 117, 7, 1, 0, 0, 117, 118, 7, 2, 0, 0, 118, 119,
7, 3, 0, 0, 119, 28, 1, 0, 0, 0, 120, 121, 7, 1, 0, 0, 121, 122, 7, 0,
0, 0, 122, 123, 7, 1, 0, 0, 123, 124, 7, 2, 0, 0, 124, 125, 7, 3, 0, 0,
125, 30, 1, 0, 0, 0, 126, 127, 7, 4, 0, 0, 127, 128, 7, 5, 0, 0, 128, 130,
7, 6, 0, 0, 129, 131, 7, 7, 0, 0, 130, 129, 1, 0, 0, 0, 131, 132, 1, 0,
0, 0, 132, 130, 1, 0, 0, 0, 132, 133, 1, 0, 0, 0, 133, 134, 1, 0, 0, 0,
134, 135, 7, 1, 0, 0, 135, 136, 7, 0, 0, 0, 136, 137, 7, 1, 0, 0, 137,
138, 7, 2, 0, 0, 138, 139, 7, 3, 0, 0, 139, 32, 1, 0, 0, 0, 140, 141, 7,
8, 0, 0, 141, 142, 7, 3, 0, 0, 142, 143, 7, 6, 0, 0, 143, 144, 7, 9, 0,
0, 144, 145, 7, 3, 0, 0, 145, 146, 7, 3, 0, 0, 146, 147, 7, 4, 0, 0, 147,
34, 1, 0, 0, 0, 148, 149, 7, 3, 0, 0, 149, 150, 7, 10, 0, 0, 150, 151,
7, 1, 0, 0, 151, 152, 7, 11, 0, 0, 152, 154, 7, 6, 0, 0, 153, 155, 7, 11,
0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 36, 1, 0, 0, 0,
156, 157, 7, 12, 0, 0, 157, 158, 7, 3, 0, 0, 158, 159, 7, 13, 0, 0, 159,
160, 7, 3, 0, 0, 160, 161, 7, 10, 0, 0, 161, 162, 7, 14, 0, 0, 162, 38,
1, 0, 0, 0, 163, 164, 7, 15, 0, 0, 164, 165, 7, 5, 0, 0, 165, 166, 7, 4,
0, 0, 166, 167, 7, 6, 0, 0, 167, 168, 7, 16, 0, 0, 168, 169, 7, 1, 0, 0,
169, 171, 7, 4, 0, 0, 170, 172, 7, 11, 0, 0, 171, 170, 1, 0, 0, 0, 171,
172, 1, 0, 0, 0, 172, 40, 1, 0, 0, 0, 173, 174, 7, 1, 0, 0, 174, 175, 7,
4, 0, 0, 175, 42, 1, 0, 0, 0, 176, 177, 7, 4, 0, 0, 177, 178, 7, 5, 0,
0, 178, 179, 7, 6, 0, 0, 179, 44, 1, 0, 0, 0, 180, 181, 7, 16, 0, 0, 181,
182, 7, 4, 0, 0, 182, 183, 7, 17, 0, 0, 183, 46, 1, 0, 0, 0, 184, 185,
7, 5, 0, 0, 185, 186, 7, 12, 0, 0, 186, 48, 1, 0, 0, 0, 187, 188, 7, 18,
0, 0, 188, 189, 7, 16, 0, 0, 189, 190, 7, 11, 0, 0, 190, 50, 1, 0, 0, 0,
191, 192, 7, 18, 0, 0, 192, 193, 7, 16, 0, 0, 193, 194, 7, 11, 0, 0, 194,
195, 7, 16, 0, 0, 195, 196, 7, 4, 0, 0, 196, 197, 7, 19, 0, 0, 197, 52,
1, 0, 0, 0, 198, 199, 7, 18, 0, 0, 199, 200, 7, 16, 0, 0, 200, 201, 7,
11, 0, 0, 201, 202, 7, 16, 0, 0, 202, 203, 7, 0, 0, 0, 203, 204, 7, 0,
0, 0, 204, 54, 1, 0, 0, 0, 205, 206, 7, 18, 0, 0, 206, 207, 7, 16, 0, 0,
207, 208, 7, 11, 0, 0, 208, 209, 7, 4, 0, 0, 209, 210, 7, 5, 0, 0, 210,
211, 7, 4, 0, 0, 211, 212, 7, 3, 0, 0, 212, 56, 1, 0, 0, 0, 213, 214, 7,
6, 0, 0, 214, 215, 7, 12, 0, 0, 215, 216, 7, 20, 0, 0, 216, 223, 7, 3,
0, 0, 217, 218, 7, 21, 0, 0, 218, 219, 7, 16, 0, 0, 219, 220, 7, 0, 0,
0, 220, 221, 7, 11, 0, 0, 221, 223, 7, 3, 0, 0, 222, 213, 1, 0, 0, 0, 222,
217, 1, 0, 0, 0, 223, 58, 1, 0, 0, 0, 224, 226, 3, 67, 33, 0, 225, 224,
1, 0, 0, 0, 226, 227, 1, 0, 0, 0, 227, 225, 1, 0, 0, 0, 227, 228, 1, 0,
0, 0, 228, 235, 1, 0, 0, 0, 229, 231, 5, 46, 0, 0, 230, 232, 3, 67, 33,
0, 231, 230, 1, 0, 0, 0, 232, 233, 1, 0, 0, 0, 233, 231, 1, 0, 0, 0, 233,
234, 1, 0, 0, 0, 234, 236, 1, 0, 0, 0, 235, 229, 1, 0, 0, 0, 235, 236,
1, 0, 0, 0, 236, 60, 1, 0, 0, 0, 237, 243, 5, 34, 0, 0, 238, 242, 8, 22,
0, 0, 239, 240, 5, 92, 0, 0, 240, 242, 9, 0, 0, 0, 241, 238, 1, 0, 0, 0,
241, 239, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243,
244, 1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 258,
5, 34, 0, 0, 247, 253, 5, 39, 0, 0, 248, 252, 8, 23, 0, 0, 249, 250, 5,
92, 0, 0, 250, 252, 9, 0, 0, 0, 251, 248, 1, 0, 0, 0, 251, 249, 1, 0, 0,
0, 252, 255, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 253, 254, 1, 0, 0, 0, 254,
256, 1, 0, 0, 0, 255, 253, 1, 0, 0, 0, 256, 258, 5, 39, 0, 0, 257, 237,
1, 0, 0, 0, 257, 247, 1, 0, 0, 0, 258, 62, 1, 0, 0, 0, 259, 263, 7, 24,
0, 0, 260, 262, 7, 25, 0, 0, 261, 260, 1, 0, 0, 0, 262, 265, 1, 0, 0, 0,
263, 261, 1, 0, 0, 0, 263, 264, 1, 0, 0, 0, 264, 64, 1, 0, 0, 0, 265, 263,
1, 0, 0, 0, 266, 268, 7, 26, 0, 0, 267, 266, 1, 0, 0, 0, 268, 269, 1, 0,
0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0,
271, 272, 6, 32, 0, 0, 272, 66, 1, 0, 0, 0, 273, 274, 7, 27, 0, 0, 274,
68, 1, 0, 0, 0, 275, 277, 8, 28, 0, 0, 276, 275, 1, 0, 0, 0, 277, 278,
1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 70, 1, 0,
0, 0, 18, 0, 84, 113, 132, 154, 171, 222, 227, 233, 235, 241, 243, 251,
253, 257, 263, 269, 278, 1, 6, 0, 0,
}
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
@@ -257,10 +261,11 @@ const (
FilterQueryLexerHAS = 25
FilterQueryLexerHASANY = 26
FilterQueryLexerHASALL = 27
FilterQueryLexerBOOL = 28
FilterQueryLexerNUMBER = 29
FilterQueryLexerQUOTED_TEXT = 30
FilterQueryLexerKEY = 31
FilterQueryLexerWS = 32
FilterQueryLexerFREETEXT = 33
FilterQueryLexerHASNONE = 28
FilterQueryLexerBOOL = 29
FilterQueryLexerNUMBER = 30
FilterQueryLexerQUOTED_TEXT = 31
FilterQueryLexerKEY = 32
FilterQueryLexerWS = 33
FilterQueryLexerFREETEXT = 34
)

View File

@@ -40,8 +40,8 @@ func filterqueryParserInit() {
"", "LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
"HAS", "HASANY", "HASALL", "BOOL", "NUMBER", "QUOTED_TEXT", "KEY", "WS",
"FREETEXT",
"HAS", "HASANY", "HASALL", "HASNONE", "BOOL", "NUMBER", "QUOTED_TEXT",
"KEY", "WS", "FREETEXT",
}
staticData.RuleNames = []string{
"query", "expression", "orExpression", "andExpression", "unaryExpression",
@@ -51,7 +51,7 @@ func filterqueryParserInit() {
}
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 1, 33, 212, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7,
4, 1, 34, 212, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7,
4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7,
10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15,
2, 16, 7, 16, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 5, 2, 43,
@@ -72,7 +72,7 @@ func filterqueryParserInit() {
13, 1, 13, 1, 13, 3, 13, 202, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15,
1, 15, 1, 16, 1, 16, 1, 16, 0, 0, 17, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18,
20, 22, 24, 26, 28, 30, 32, 0, 6, 1, 0, 7, 8, 2, 0, 13, 13, 15, 15, 2,
0, 14, 14, 16, 16, 2, 0, 30, 30, 33, 33, 1, 0, 25, 27, 1, 0, 28, 31, 225,
0, 14, 14, 16, 16, 2, 0, 31, 31, 34, 34, 1, 0, 25, 28, 1, 0, 29, 32, 225,
0, 34, 1, 0, 0, 0, 2, 37, 1, 0, 0, 0, 4, 39, 1, 0, 0, 0, 6, 47, 1, 0, 0,
0, 8, 57, 1, 0, 0, 0, 10, 69, 1, 0, 0, 0, 12, 147, 1, 0, 0, 0, 14, 159,
1, 0, 0, 0, 16, 173, 1, 0, 0, 0, 18, 175, 1, 0, 0, 0, 20, 183, 1, 0, 0,
@@ -140,7 +140,7 @@ func filterqueryParserInit() {
201, 198, 1, 0, 0, 0, 201, 199, 1, 0, 0, 0, 201, 200, 1, 0, 0, 0, 202,
27, 1, 0, 0, 0, 203, 204, 5, 3, 0, 0, 204, 205, 3, 18, 9, 0, 205, 206,
5, 4, 0, 0, 206, 29, 1, 0, 0, 0, 207, 208, 7, 5, 0, 0, 208, 31, 1, 0, 0,
0, 209, 210, 5, 31, 0, 0, 210, 33, 1, 0, 0, 0, 11, 44, 51, 53, 57, 69,
0, 209, 210, 5, 32, 0, 0, 210, 33, 1, 0, 0, 0, 11, 44, 51, 53, 57, 69,
147, 159, 173, 180, 195, 201,
}
deserializer := antlr.NewATNDeserializer(nil)
@@ -207,12 +207,13 @@ const (
FilterQueryParserHAS = 25
FilterQueryParserHASANY = 26
FilterQueryParserHASALL = 27
FilterQueryParserBOOL = 28
FilterQueryParserNUMBER = 29
FilterQueryParserQUOTED_TEXT = 30
FilterQueryParserKEY = 31
FilterQueryParserWS = 32
FilterQueryParserFREETEXT = 33
FilterQueryParserHASNONE = 28
FilterQueryParserBOOL = 29
FilterQueryParserNUMBER = 30
FilterQueryParserQUOTED_TEXT = 31
FilterQueryParserKEY = 32
FilterQueryParserWS = 33
FilterQueryParserFREETEXT = 34
)
// FilterQueryParser rules.
@@ -802,7 +803,7 @@ func (p *FilterQueryParser) AndExpression() (localctx IAndExpressionContext) {
}
_la = p.GetTokenStream().LA(1)
for (int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&12058624002) != 0 {
for (int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&24138219522) != 0 {
p.SetState(51)
p.GetErrorHandler().Sync(p)
if p.HasError() {
@@ -824,7 +825,7 @@ func (p *FilterQueryParser) AndExpression() (localctx IAndExpressionContext) {
p.UnaryExpression()
}
case FilterQueryParserLPAREN, FilterQueryParserNOT, FilterQueryParserHAS, FilterQueryParserHASANY, FilterQueryParserHASALL, FilterQueryParserQUOTED_TEXT, FilterQueryParserKEY, FilterQueryParserFREETEXT:
case FilterQueryParserLPAREN, FilterQueryParserNOT, FilterQueryParserHAS, FilterQueryParserHASANY, FilterQueryParserHASALL, FilterQueryParserHASNONE, FilterQueryParserQUOTED_TEXT, FilterQueryParserKEY, FilterQueryParserFREETEXT:
{
p.SetState(50)
p.UnaryExpression()
@@ -2652,6 +2653,7 @@ type IFunctionCallContext interface {
HAS() antlr.TerminalNode
HASANY() antlr.TerminalNode
HASALL() antlr.TerminalNode
HASNONE() antlr.TerminalNode
// IsFunctionCallContext differentiates from other interfaces.
IsFunctionCallContext()
@@ -2725,6 +2727,10 @@ func (s *FunctionCallContext) HASALL() antlr.TerminalNode {
return s.GetToken(FilterQueryParserHASALL, 0)
}
func (s *FunctionCallContext) HASNONE() antlr.TerminalNode {
return s.GetToken(FilterQueryParserHASNONE, 0)
}
func (s *FunctionCallContext) GetRuleContext() antlr.RuleContext {
return s
}
@@ -2765,7 +2771,7 @@ func (p *FilterQueryParser) FunctionCall() (localctx IFunctionCallContext) {
p.SetState(185)
_la = p.GetTokenStream().LA(1)
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&234881024) != 0) {
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&503316480) != 0) {
p.GetErrorHandler().RecoverInline(p)
} else {
p.GetErrorHandler().ReportMatch(p)
@@ -3405,7 +3411,7 @@ func (p *FilterQueryParser) Value() (localctx IValueContext) {
p.SetState(207)
_la = p.GetTokenStream().LA(1)
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&4026531840) != 0) {
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&8053063680) != 0) {
p.GetErrorHandler().RecoverInline(p)
} else {
p.GetErrorHandler().ReportMatch(p)

View File

@@ -1,48 +0,0 @@
package parser
import (
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/antlr4-go/antlr/v4"
)
// QueryStringToKeysSelectors converts a query string to a list of field key selectors
//
// e.g. "service.name="query-service" AND http.status_code=200 AND resource.k8s.namespace.name="application"" -> []*telemetrytypes.FieldKeySelector{
// {
// Name: "service.name",
// FieldContext: telemetrytypes.FieldContextUnspecified,
// FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
// },
// {
// Name: "http.status_code",
// FieldContext: telemetrytypes.FieldContextUnspecified,
// FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
// },
// {
// Name: "resource.k8s.namespace.name",
// FieldContext: telemetrytypes.FieldContextResource,
// FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
// },
// }
func QueryStringToKeysSelectors(query string) ([]*telemetrytypes.FieldKeySelector, error) {
lexer := NewFilterQueryLexer(antlr.NewInputStream(query))
keys := []*telemetrytypes.FieldKeySelector{}
for {
tok := lexer.NextToken()
if tok.GetTokenType() == antlr.TokenEOF {
break
}
if tok.GetTokenType() == FilterQueryLexerKEY {
key := telemetrytypes.GetFieldKeyFromKeyText(tok.GetText())
keys = append(keys, &telemetrytypes.FieldKeySelector{
Name: key.Name,
Signal: key.Signal,
FieldContext: key.FieldContext,
FieldDataType: key.FieldDataType,
})
}
}
return keys, nil
}
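// A minimal usage sketch of the helper above (names as defined in this
// package; the loop body is illustrative):
//
//	selectors, err := QueryStringToKeysSelectors(`service.name="redis" AND http.status_code=200`)
//	if err != nil {
//		log.Fatal(err)
//	}
//	for _, sel := range selectors {
//		fmt.Println(sel.Name, sel.FieldContext, sel.FieldDataType)
//	}
//	// yields service.name and http.status_code, both with unspecified
//	// field context and data type (see the tests in the next file)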

View File

@@ -1,101 +0,0 @@
package parser
import (
"testing"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
func TestQueryToKeys(t *testing.T) {
testCases := []struct {
query string
expectedKeys []telemetrytypes.FieldKeySelector
}{
{
query: `service.name="redis"`,
expectedKeys: []telemetrytypes.FieldKeySelector{
{
Name: "service.name",
Signal: telemetrytypes.SignalUnspecified,
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
},
{
query: `resource.service.name="redis"`,
expectedKeys: []telemetrytypes.FieldKeySelector{
{
Name: "service.name",
Signal: telemetrytypes.SignalUnspecified,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
},
{
query: `service.name="redis" AND http.status_code=200`,
expectedKeys: []telemetrytypes.FieldKeySelector{
{
Name: "service.name",
Signal: telemetrytypes.SignalUnspecified,
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
{
Name: "http.status_code",
Signal: telemetrytypes.SignalUnspecified,
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
},
{
query: `has(payload.user_ids, 123)`,
expectedKeys: []telemetrytypes.FieldKeySelector{
{
Name: "payload.user_ids",
Signal: telemetrytypes.SignalUnspecified,
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
},
{
query: `body.user_ids[*] = 123`,
expectedKeys: []telemetrytypes.FieldKeySelector{
{
Name: "body.user_ids[*]",
Signal: telemetrytypes.SignalUnspecified,
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
},
}
for _, testCase := range testCases {
keys, err := QueryStringToKeysSelectors(testCase.query)
if err != nil {
t.Fatalf("Error: %v", err)
}
if len(keys) != len(testCase.expectedKeys) {
t.Fatalf("Expected %d keys, got %d", len(testCase.expectedKeys), len(keys))
}
for i, key := range keys {
if key.Name != testCase.expectedKeys[i].Name {
t.Fatalf("Expected key %v, got %v", testCase.expectedKeys[i], key)
}
if key.Signal != testCase.expectedKeys[i].Signal {
t.Fatalf("Expected signal %v, got %v", testCase.expectedKeys[i].Signal, key.Signal)
}
if key.FieldContext != testCase.expectedKeys[i].FieldContext {
t.Fatalf("Expected field context %v, got %v", testCase.expectedKeys[i].FieldContext, key.FieldContext)
}
if key.FieldDataType != testCase.expectedKeys[i].FieldDataType {
t.Fatalf("Expected field data type %v, got %v", testCase.expectedKeys[i].FieldDataType, key.FieldDataType)
}
}
}
}

View File

@@ -1,587 +0,0 @@
package parser
import (
"context"
"fmt"
"strconv"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/antlr4-go/antlr/v4"
sqlbuilder "github.com/huandu/go-sqlbuilder"
)
// WhereClauseVisitor implements the FilterQueryVisitor interface
// to convert parsed filter expressions into a ClickHouse WHERE clause
type WhereClauseVisitor struct {
conditionBuilder qbtypes.ConditionBuilder
warnings []error
fieldKeys map[string][]telemetrytypes.TelemetryFieldKey
errors []error
builder *sqlbuilder.SelectBuilder
fullTextColumn telemetrytypes.TelemetryFieldKey
}
// NewWhereClauseVisitor creates a new WhereClauseVisitor
func NewWhereClauseVisitor(
conditionBuilder qbtypes.ConditionBuilder,
fieldKeys map[string][]telemetrytypes.TelemetryFieldKey,
builder *sqlbuilder.SelectBuilder,
fullTextColumn telemetrytypes.TelemetryFieldKey,
) *WhereClauseVisitor {
return &WhereClauseVisitor{
conditionBuilder: conditionBuilder,
fieldKeys: fieldKeys,
builder: builder,
fullTextColumn: fullTextColumn,
}
}
type SyntaxError struct {
line, column int
msg string
}
func (e *SyntaxError) Error() string {
return fmt.Sprintf("line %d:%d %s", e.line, e.column, e.msg)
}
// ErrorListener is a custom error listener to capture syntax errors
type ErrorListener struct {
*antlr.DefaultErrorListener
Errors []error
}
// NewErrorListener creates a new error listener
func NewErrorListener() *ErrorListener {
return &ErrorListener{
DefaultErrorListener: antlr.NewDefaultErrorListener(),
Errors: []error{},
}
}
// SyntaxError captures syntax errors during parsing
func (l *ErrorListener) SyntaxError(recognizer antlr.Recognizer, offendingSymbol any, line, column int, msg string, e antlr.RecognitionException) {
l.Errors = append(l.Errors, &SyntaxError{line: line, column: column, msg: msg})
}
// PrepareWhereClause generates a ClickHouse compatible WHERE clause from the filter query
func PrepareWhereClause(
query string,
fieldKeys map[string][]telemetrytypes.TelemetryFieldKey,
conditionBuilder qbtypes.ConditionBuilder,
fullTextColumn telemetrytypes.TelemetryFieldKey,
) (string, []any, []error, error) {
// Setup the ANTLR parsing pipeline
input := antlr.NewInputStream(query)
lexer := NewFilterQueryLexer(input)
sb := sqlbuilder.NewSelectBuilder()
visitor := NewWhereClauseVisitor(conditionBuilder, fieldKeys, sb, fullTextColumn)
// Set up error handling
lexerErrorListener := NewErrorListener()
lexer.RemoveErrorListeners()
lexer.AddErrorListener(lexerErrorListener)
tokens := antlr.NewCommonTokenStream(lexer, 0)
parserErrorListener := NewErrorListener()
parser := NewFilterQueryParser(tokens)
parser.RemoveErrorListeners()
parser.AddErrorListener(parserErrorListener)
// Parse the query
tree := parser.Query()
// Handle syntax errors
if len(parserErrorListener.Errors) > 0 {
combinedErrors := errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"found %d syntax errors while parsing the search expression: %v",
len(parserErrorListener.Errors),
parserErrorListener.Errors,
)
return "", nil, nil, combinedErrors
}
// Visit the parse tree with our ClickHouse visitor
cond := visitor.Visit(tree).(string)
if len(visitor.errors) > 0 {
// combine all errors into a single error
combinedErrors := errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"found %d errors while parsing the search expression: %v",
len(visitor.errors),
visitor.errors,
)
return "", nil, nil, combinedErrors
}
whereClause, args := visitor.builder.Where(cond).BuildWithFlavor(sqlbuilder.ClickHouse)
return whereClause, args, visitor.warnings, nil
}
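// A minimal usage sketch of PrepareWhereClause, mirroring how the tests later
// in this diff call it for logs (fieldKeys is assumed to hold the metadata for
// the keys referenced in the expression):
//
//	where, args, warnings, err := PrepareWhereClause(
//		`service.name="redis" AND http.status_code=200`,
//		fieldKeys,
//		telemetrylogs.NewConditionBuilder(),
//		telemetrytypes.TelemetryFieldKey{
//			Name:          "body",
//			Signal:        telemetrytypes.SignalLogs,
//			FieldContext:  telemetrytypes.FieldContextLog,
//			FieldDataType: telemetrytypes.FieldDataTypeString,
//		},
//	)
//	// `where` is a parameterized clause such as "WHERE (...)", `args` holds
//	// the bound values, and `warnings` reports ambiguous keys, if any.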
// Visit dispatches to the specific visit method based on node type
func (v *WhereClauseVisitor) Visit(tree antlr.ParseTree) any {
// Handle nil nodes to prevent panic
if tree == nil {
return ""
}
switch t := tree.(type) {
case *QueryContext:
return v.VisitQuery(t)
case *ExpressionContext:
return v.VisitExpression(t)
case *OrExpressionContext:
return v.VisitOrExpression(t)
case *AndExpressionContext:
return v.VisitAndExpression(t)
case *UnaryExpressionContext:
return v.VisitUnaryExpression(t)
case *PrimaryContext:
return v.VisitPrimary(t)
case *ComparisonContext:
return v.VisitComparison(t)
case *InClauseContext:
return v.VisitInClause(t)
case *NotInClauseContext:
return v.VisitNotInClause(t)
case *ValueListContext:
return v.VisitValueList(t)
case *FullTextContext:
return v.VisitFullText(t)
case *FunctionCallContext:
return v.VisitFunctionCall(t)
case *FunctionParamListContext:
return v.VisitFunctionParamList(t)
case *FunctionParamContext:
return v.VisitFunctionParam(t)
case *ArrayContext:
return v.VisitArray(t)
case *ValueContext:
return v.VisitValue(t)
case *KeyContext:
return v.VisitKey(t)
default:
return ""
}
}
func (v *WhereClauseVisitor) VisitQuery(ctx *QueryContext) any {
return v.Visit(ctx.Expression())
}
// VisitExpression passes through to the orExpression
func (v *WhereClauseVisitor) VisitExpression(ctx *ExpressionContext) any {
return v.Visit(ctx.OrExpression())
}
// VisitOrExpression handles OR expressions
func (v *WhereClauseVisitor) VisitOrExpression(ctx *OrExpressionContext) any {
andExpressions := ctx.AllAndExpression()
andExpressionConditions := make([]string, len(andExpressions))
for i, expr := range andExpressions {
andExpressionConditions[i] = v.Visit(expr).(string)
}
if len(andExpressionConditions) == 1 {
return andExpressionConditions[0]
}
return v.builder.Or(andExpressionConditions...)
}
// VisitAndExpression handles AND expressions
func (v *WhereClauseVisitor) VisitAndExpression(ctx *AndExpressionContext) any {
unaryExpressions := ctx.AllUnaryExpression()
unaryExpressionConditions := make([]string, len(unaryExpressions))
for i, expr := range unaryExpressions {
unaryExpressionConditions[i] = v.Visit(expr).(string)
}
if len(unaryExpressionConditions) == 1 {
return unaryExpressionConditions[0]
}
return v.builder.And(unaryExpressionConditions...)
}
// VisitUnaryExpression handles NOT expressions
func (v *WhereClauseVisitor) VisitUnaryExpression(ctx *UnaryExpressionContext) any {
result := v.Visit(ctx.Primary()).(string)
// Check if this is a NOT expression
if ctx.NOT() != nil {
return fmt.Sprintf("NOT (%s)", result)
}
return result
}
// VisitPrimary handles grouped expressions, comparisons, function calls, and full-text search
func (v *WhereClauseVisitor) VisitPrimary(ctx *PrimaryContext) any {
if ctx.OrExpression() != nil {
// This is a parenthesized expression
return fmt.Sprintf("(%s)", v.Visit(ctx.OrExpression()).(string))
} else if ctx.Comparison() != nil {
return v.Visit(ctx.Comparison())
} else if ctx.FunctionCall() != nil {
return v.Visit(ctx.FunctionCall())
} else if ctx.FullText() != nil {
return v.Visit(ctx.FullText())
}
// Handle standalone key as a full text search term
if ctx.GetChildCount() == 1 {
child := ctx.GetChild(0)
if keyCtx, ok := child.(*KeyContext); ok {
// create a full text search condition on the body field
keyText := keyCtx.GetText()
cond, err := v.conditionBuilder.GetCondition(context.Background(), &v.fullTextColumn, qbtypes.FilterOperatorRegexp, keyText, v.builder)
if err != nil {
return ""
}
return cond
}
}
return "" // Should not happen with valid input
}
// VisitComparison handles all comparison operators
func (v *WhereClauseVisitor) VisitComparison(ctx *ComparisonContext) any {
keys := v.Visit(ctx.Key()).([]telemetrytypes.TelemetryFieldKey)
// Handle EXISTS specially
if ctx.EXISTS() != nil {
op := qbtypes.FilterOperatorExists
if ctx.NOT() != nil {
op = qbtypes.FilterOperatorNotExists
}
var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.GetCondition(context.Background(), &key, op, nil, v.builder)
if err != nil {
return ""
}
conds = append(conds, condition)
}
return v.builder.Or(conds...)
}
// Handle IN clause
	if ctx.InClause() != nil || ctx.NotInClause() != nil {
		// visit whichever clause is present; visiting a nil InClause for a
		// NOT IN expression would make the type assertion below panic
		op := qbtypes.FilterOperatorIn
		var values []any
		if ctx.NotInClause() != nil {
			op = qbtypes.FilterOperatorNotIn
			values = v.Visit(ctx.NotInClause()).([]any)
		} else {
			values = v.Visit(ctx.InClause()).([]any)
		}
var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.GetCondition(context.Background(), &key, op, values, v.builder)
if err != nil {
return ""
}
conds = append(conds, condition)
}
return v.builder.Or(conds...)
}
// Handle BETWEEN
if ctx.BETWEEN() != nil {
op := qbtypes.FilterOperatorBetween
if ctx.NOT() != nil {
op = qbtypes.FilterOperatorNotBetween
}
values := ctx.AllValue()
if len(values) != 2 {
return ""
}
value1 := v.Visit(values[0])
value2 := v.Visit(values[1])
var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.GetCondition(context.Background(), &key, op, []any{value1, value2}, v.builder)
if err != nil {
return ""
}
conds = append(conds, condition)
}
return v.builder.Or(conds...)
}
// Get all values for operations that need them
values := ctx.AllValue()
if len(values) > 0 {
value := v.Visit(values[0])
var op qbtypes.FilterOperator
// Handle each type of comparison
if ctx.EQUALS() != nil {
op = qbtypes.FilterOperatorEqual
} else if ctx.NOT_EQUALS() != nil || ctx.NEQ() != nil {
op = qbtypes.FilterOperatorNotEqual
} else if ctx.LT() != nil {
op = qbtypes.FilterOperatorLessThan
} else if ctx.LE() != nil {
op = qbtypes.FilterOperatorLessThanOrEq
} else if ctx.GT() != nil {
op = qbtypes.FilterOperatorGreaterThan
} else if ctx.GE() != nil {
op = qbtypes.FilterOperatorGreaterThanOrEq
} else if ctx.LIKE() != nil {
op = qbtypes.FilterOperatorLike
} else if ctx.ILIKE() != nil {
op = qbtypes.FilterOperatorLike
} else if ctx.NOT_LIKE() != nil {
op = qbtypes.FilterOperatorNotLike
} else if ctx.NOT_ILIKE() != nil {
op = qbtypes.FilterOperatorNotLike
		} else if ctx.NOT() != nil && ctx.REGEXP() != nil {
			// check the negated forms before the plain ones, otherwise
			// these branches are unreachable
			op = qbtypes.FilterOperatorNotRegexp
		} else if ctx.REGEXP() != nil {
			op = qbtypes.FilterOperatorRegexp
		} else if ctx.NOT() != nil && ctx.CONTAINS() != nil {
			op = qbtypes.FilterOperatorNotContains
		} else if ctx.CONTAINS() != nil {
			op = qbtypes.FilterOperatorContains
		}
var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.GetCondition(context.Background(), &key, op, value, v.builder)
if err != nil {
return ""
}
conds = append(conds, condition)
}
return v.builder.Or(conds...)
}
return "" // Should not happen with valid input
}
// VisitInClause handles IN expressions
func (v *WhereClauseVisitor) VisitInClause(ctx *InClauseContext) any {
return v.Visit(ctx.ValueList())
}
// VisitNotInClause handles NOT IN expressions
func (v *WhereClauseVisitor) VisitNotInClause(ctx *NotInClauseContext) any {
return v.Visit(ctx.ValueList())
}
// VisitValueList handles comma-separated value lists
func (v *WhereClauseVisitor) VisitValueList(ctx *ValueListContext) any {
values := ctx.AllValue()
parts := []any{}
for _, val := range values {
parts = append(parts, v.Visit(val))
}
return parts
}
// VisitFullText handles standalone quoted strings for full-text search
func (v *WhereClauseVisitor) VisitFullText(ctx *FullTextContext) any {
// remove quotes from the quotedText
quotedText := strings.Trim(ctx.QUOTED_TEXT().GetText(), "\"'")
cond, err := v.conditionBuilder.GetCondition(context.Background(), &v.fullTextColumn, qbtypes.FilterOperatorRegexp, quotedText, v.builder)
if err != nil {
return ""
}
return cond
}
// VisitFunctionCall handles function calls like has(), hasAny(), etc.
func (v *WhereClauseVisitor) VisitFunctionCall(ctx *FunctionCallContext) any {
// Get function name based on which token is present
var functionName string
if ctx.HAS() != nil {
functionName = "has"
} else if ctx.HASANY() != nil {
functionName = "hasAny"
} else if ctx.HASALL() != nil {
functionName = "hasAll"
} else {
// Default fallback
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"unknown function `%s`",
ctx.GetText(),
))
return ""
}
params := v.Visit(ctx.FunctionParamList()).([]any)
if len(params) < 2 {
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"function `%s` expects key and value parameters",
functionName,
))
return ""
}
keys, ok := params[0].([]telemetrytypes.TelemetryFieldKey)
if !ok {
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"function `%s` expects key parameter to be a field key",
functionName,
))
return ""
}
value := params[1:]
var conds []string
for _, key := range keys {
var fieldName string
if strings.HasPrefix(key.Name, telemetrylogs.BodyJSONStringSearchPrefix) {
fieldName, _ = telemetrylogs.GetBodyJSONKey(context.Background(), &key, qbtypes.FilterOperatorUnknown, value)
} else {
fieldName, _ = v.conditionBuilder.GetTableFieldName(context.Background(), &key)
}
var cond string
// Map our functions to ClickHouse equivalents
switch functionName {
case "has":
cond = fmt.Sprintf("has(%s, %s)", fieldName, v.builder.Var(value[0]))
case "hasAny":
cond = fmt.Sprintf("hasAny(%s, %s)", fieldName, v.builder.Var(value))
case "hasAll":
cond = fmt.Sprintf("hasAll(%s, %s)", fieldName, v.builder.Var(value))
}
conds = append(conds, cond)
}
return v.builder.Or(conds...)
}
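// Example mapping produced by VisitFunctionCall above, taken from the test
// cases later in this diff: the filter `has(service.name, 'redis')` becomes
// `has(resources_string['service.name'], ?)` with "redis" bound as the
// argument, while hasAny/hasAll forward the remaining parameters as a list.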
// VisitFunctionParamList handles the parameter list for function calls
func (v *WhereClauseVisitor) VisitFunctionParamList(ctx *FunctionParamListContext) any {
params := ctx.AllFunctionParam()
parts := make([]any, len(params))
for i, param := range params {
parts[i] = v.Visit(param)
}
return parts
}
// VisitFunctionParam handles individual parameters in function calls
func (v *WhereClauseVisitor) VisitFunctionParam(ctx *FunctionParamContext) any {
if ctx.Key() != nil {
return v.Visit(ctx.Key())
} else if ctx.Value() != nil {
return v.Visit(ctx.Value())
} else if ctx.Array() != nil {
return v.Visit(ctx.Array())
}
return "" // Should not happen with valid input
}
// VisitArray handles array literals
func (v *WhereClauseVisitor) VisitArray(ctx *ArrayContext) any {
return v.Visit(ctx.ValueList())
}
// VisitValue handles literal values: strings, numbers, booleans
func (v *WhereClauseVisitor) VisitValue(ctx *ValueContext) any {
if ctx.QUOTED_TEXT() != nil {
txt := ctx.QUOTED_TEXT().GetText()
// trim quotes and return the value
return strings.Trim(txt, "\"'")
} else if ctx.NUMBER() != nil {
number, err := strconv.ParseFloat(ctx.NUMBER().GetText(), 64)
if err != nil {
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"failed to parse number %s",
ctx.NUMBER().GetText(),
))
return ""
}
return number
} else if ctx.BOOL() != nil {
// Convert to ClickHouse boolean literal
boolText := strings.ToLower(ctx.BOOL().GetText())
return boolText == "true"
} else if ctx.KEY() != nil {
		// Why do we have a KEY context here?
		// When the user writes an expression like `service.name=redis`,
		// the `redis` part is a VALUE context but is parsed as a KEY token,
		// so we return the text as-is
return ctx.KEY().GetText()
}
return "" // Should not happen with valid input
}
// VisitKey handles field/column references
func (v *WhereClauseVisitor) VisitKey(ctx *KeyContext) any {
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(ctx.KEY().GetText())
keyName := strings.TrimPrefix(fieldKey.Name, telemetrylogs.BodyJSONStringSearchPrefix)
fieldKeysForName := v.fieldKeys[keyName]
	// for the body JSON search, we need to add a search on the body field even
	// if there is an attribute/resource attribute with the same name.
	// Since it will be ORed with fieldKeysForName, the result will not be empty
	// when either of them has values
if strings.HasPrefix(fieldKey.Name, telemetrylogs.BodyJSONStringSearchPrefix) {
fieldKeysForName = append(fieldKeysForName, fieldKey)
}
// TODO(srikanthccv): do we want to return an error here?
// should we infer the type and auto-magically build a key for expression?
if len(fieldKeysForName) == 0 {
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"key `%s` not found",
fieldKey.Name,
))
}
if len(fieldKeysForName) > 1 {
		// this is a warning state; we must have an unambiguous key
v.warnings = append(v.warnings, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"key `%s` is ambiguous, found %d different combinations of field context and data type: %v",
fieldKey.Name,
len(fieldKeysForName),
fieldKeysForName,
))
}
return fieldKeysForName
}
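// Example of the body-prefix handling above: for a filter on
// `body.http.status_code`, keyName becomes "http.status_code", so any
// attribute keys registered under that name are returned alongside the body
// JSON key itself, and the resulting conditions are ORed (see the
// "body-json-search-that-also-has-attribute-with-same-name" test case
// further down in this diff).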

View File

@@ -1,638 +0,0 @@
package parser
import (
"reflect"
"strings"
"testing"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrytraces"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
func TestConvertToClickHouseLogsQuery(t *testing.T) {
cases := []struct {
name string
fieldKeys map[string][]telemetrytypes.TelemetryFieldKey
query string
expectedSearchString string
expectedSearchArgs []any
}{
{
name: "test-simple-service-name-filter",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"service.name": {
{
Name: "service.name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "service.name=redis",
expectedSearchString: "WHERE (resources_string['service.name'] = ?)",
expectedSearchArgs: []any{"redis"},
},
{
name: "test-simple-service-name-filter-with-materialised-column",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"service.name": {
{
Name: "service.name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
},
},
query: "service.name=redis",
expectedSearchString: "WHERE (resource_string_service$$name = ?)",
expectedSearchArgs: []any{"redis"},
},
{
name: "http-status-code-multiple-data-types",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"http.status_code": {
{
Name: "http.status_code",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
{
Name: "http.status_code",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "http.status_code=200",
expectedSearchString: "WHERE (attributes_number['http.status_code'] = ? OR toFloat64OrNull(attributes_string['http.status_code']) = ?)",
expectedSearchArgs: []any{float64(200), float64(200)},
},
{
name: "http-status-code-multiple-data-types-between-operator",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"http.status_code": {
{
Name: "http.status_code",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
{
Name: "http.status_code",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "http.status_code between 200 and 300",
expectedSearchString: "WHERE (attributes_number['http.status_code'] BETWEEN ? AND ? OR toFloat64OrNull(attributes_string['http.status_code']) BETWEEN ? AND ?)",
expectedSearchArgs: []any{float64(200), float64(300), float64(200), float64(300)},
},
{
name: "response-body-multiple-data-types-string-contains",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"response.body": {
{
Name: "response.body",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
{
Name: "response.body",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "response.body contains error",
expectedSearchString: "WHERE (LOWER(toString(attributes_number['response.body'])) LIKE LOWER(?) OR LOWER(attributes_string['response.body']) LIKE LOWER(?))",
expectedSearchArgs: []any{"%error%", "%error%"},
},
{
name: "search-on-top-level-key",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"severity_text": {
{
Name: "severity_text",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "severity_text=error",
expectedSearchString: "WHERE (severity_text = ?)",
expectedSearchArgs: []any{"error"},
},
{
name: "search-on-top-level-key-conflict-with-attribute",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"severity_text": {
{
Name: "severity_text",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
Name: "severity_text",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "severity_text=error",
expectedSearchString: "WHERE (severity_text = ? OR attributes_string['severity_text'] = ?)",
expectedSearchArgs: []any{"error", "error"},
},
{
name: "collision-with-attribute-field-and-resource-attribute",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"k8s.namespace.name": {
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "k8s.namespace.name=test",
expectedSearchString: "WHERE (resources_string['k8s.namespace.name'] = ? OR attributes_string['k8s.namespace.name'] = ?)",
expectedSearchArgs: []any{"test", "test"},
},
{
name: "collision-with-attribute-field-and-resource-attribute-materialised-column",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"k8s.namespace.name": {
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "k8s.namespace.name=test",
expectedSearchString: "WHERE (resource_string_k8s$$namespace$$name = ? OR attributes_string['k8s.namespace.name'] = ?)",
expectedSearchArgs: []any{"test", "test"},
},
{
name: "boolean-collision-with-attribute-field-and-data-type-boolean",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"did_user_login": {
{
Name: "did_user_login",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
{
Name: "did_user_login",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "did_user_login=true",
expectedSearchString: "WHERE (attributes_bool['did_user_login'] = ? OR attributes_string['did_user_login'] = ?)",
expectedSearchArgs: []any{true, "true"},
},
{
name: "regexp-search",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"k8s.namespace.name": {
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"service.name": {
{
Name: "service.name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "k8s.namespace.name REGEXP 'test' OR service.name='redis'",
expectedSearchString: "WHERE (((match(attributes_string['k8s.namespace.name'], ?))) OR (resources_string['service.name'] = ?))",
expectedSearchArgs: []any{"test", "redis"},
},
{
name: "full-text-search-multiple-words",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{},
query: "waiting for response",
expectedSearchString: "WHERE ((match(body, ?)) AND (match(body, ?)) AND (match(body, ?)))",
expectedSearchArgs: []any{"waiting", "for", "response"},
},
{
name: "full-text-search-with-phrase-search",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{},
query: `"waiting for response"`,
expectedSearchString: "WHERE (match(body, ?))",
expectedSearchArgs: []any{"waiting for response"},
},
{
name: "full-text-search-with-word-and-not-word",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{},
query: "error NOT buggy_app",
expectedSearchString: "WHERE ((match(body, ?)) AND NOT ((match(body, ?))))",
expectedSearchArgs: []any{"error", "buggy_app"},
},
{
name: "full-text-search-with-word-and-not-word-and-not-word",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{},
query: "error NOT buggy_app NOT redis",
expectedSearchString: "WHERE ((match(body, ?)) AND NOT ((match(body, ?))) AND NOT ((match(body, ?))))",
expectedSearchArgs: []any{"error", "buggy_app", "redis"},
},
{
name: "full-text-search-with-word-and-not-word-and-not-word-tricky",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{},
query: "error NOT buggy_app OR redis",
expectedSearchString: "WHERE (((match(body, ?)) AND NOT ((match(body, ?)))) OR (match(body, ?)))",
expectedSearchArgs: []any{"error", "buggy_app", "redis"},
},
{
name: "has-function",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"service.name": {
{
Name: "service.name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextResource,
},
},
"payload.user_ids": {
{
Name: "payload.user_ids",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
},
},
},
query: "has(service.name, 'redis')",
expectedSearchString: "WHERE (has(resources_string['service.name'], ?))",
expectedSearchArgs: []any{"redis"},
},
{
name: "has-from-list-of-values",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{},
query: "has(body.payload.user_ids[*], 'u1292')",
expectedSearchString: "WHERE (has(JSONExtract(JSON_QUERY(body, '$.payload.user_ids[*]'), 'Array(String)'), ?))",
expectedSearchArgs: []any{"u1292"},
},
{
name: "body-json-search-that-also-has-attribute-with-same-name",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"http.status_code": {
{
Name: "http.status_code",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
Materialized: true,
},
},
},
query: "body.http.status_code=200",
expectedSearchString: "WHERE (attribute_number_http$$status_code = ? OR JSONExtract(JSON_VALUE(body, '$.http.status_code'), 'Float64') = ?)",
expectedSearchArgs: []any{float64(200), float64(200)},
},
}
for _, c := range cases {
t.Logf("running test %s", c.name)
chQuery, chQueryArgs, _, err := PrepareWhereClause(c.query, c.fieldKeys, telemetrylogs.NewConditionBuilder(), telemetrytypes.TelemetryFieldKey{
Name: "body",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
})
if err != nil {
t.Errorf("Error converting query to ClickHouse: %v", err)
}
if chQuery != c.expectedSearchString {
t.Errorf("Expected %s, got %s", c.expectedSearchString, chQuery)
}
if !reflect.DeepEqual(chQueryArgs, c.expectedSearchArgs) {
for i, arg := range chQueryArgs {
t.Logf("Expected %v with type %T, got %v with type %T\n", c.expectedSearchArgs[i], c.expectedSearchArgs[i], arg, arg)
}
t.Errorf("Expected %v, got %v", c.expectedSearchArgs, chQueryArgs)
}
}
}
func TestConvertToClickHouseSpansQuery(t *testing.T) {
cases := []struct {
name string
fieldKeys map[string][]telemetrytypes.TelemetryFieldKey
query string
expectedSearchString string
expectedSearchArgs []any
}{
{
name: "test-simple-service-name-filter",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"service.name": {
{
Name: "service.name",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "service.name=redis",
expectedSearchString: "WHERE (resources_string['service.name'] = ?)",
expectedSearchArgs: []any{"redis"},
},
{
name: "test-simple-service-name-filter-with-materialised-column",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"service.name": {
{
Name: "service.name",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
},
},
query: "service.name=redis",
expectedSearchString: "WHERE (resource_string_service$$name = ?)",
expectedSearchArgs: []any{"redis"},
},
{
name: "http-status-code-multiple-data-types",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"http.status_code": {
{
Name: "http.status_code",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
{
Name: "http.status_code",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "http.status_code=200",
expectedSearchString: "WHERE (attributes_number['http.status_code'] = ? OR toFloat64OrNull(attributes_string['http.status_code']) = ?)",
expectedSearchArgs: []any{float64(200), float64(200)},
},
{
name: "http-status-code-multiple-data-types-between-operator",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"http.status_code": {
{
Name: "http.status_code",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
{
Name: "http.status_code",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "http.status_code between 200 and 300",
expectedSearchString: "WHERE (attributes_number['http.status_code'] BETWEEN ? AND ? OR toFloat64OrNull(attributes_string['http.status_code']) BETWEEN ? AND ?)",
expectedSearchArgs: []any{float64(200), float64(300), float64(200), float64(300)},
},
{
name: "response-body-multiple-data-types-string-contains",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"response.body": {
{
Name: "response.body",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
{
Name: "response.body",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "response.body contains error",
expectedSearchString: "WHERE (LOWER(toString(attributes_number['response.body'])) LIKE LOWER(?) OR LOWER(attributes_string['response.body']) LIKE LOWER(?))",
expectedSearchArgs: []any{"%error%", "%error%"},
},
{
name: "collision-with-attribute-field-and-resource-attribute",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"k8s.namespace.name": {
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "k8s.namespace.name=test",
expectedSearchString: "WHERE (resources_string['k8s.namespace.name'] = ? OR attributes_string['k8s.namespace.name'] = ?)",
expectedSearchArgs: []any{"test", "test"},
},
{
name: "collision-with-attribute-field-and-resource-attribute-materialised-column",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"k8s.namespace.name": {
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "k8s.namespace.name=test",
expectedSearchString: "WHERE (resource_string_k8s$$namespace$$name = ? OR attributes_string['k8s.namespace.name'] = ?)",
expectedSearchArgs: []any{"test", "test"},
},
{
name: "boolean-collision-with-attribute-field-and-data-type-boolean",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"did_user_login": {
{
Name: "did_user_login",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
{
Name: "did_user_login",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "did_user_login=true",
expectedSearchString: "WHERE (attributes_bool['did_user_login'] = ? OR attributes_string['did_user_login'] = ?)",
expectedSearchArgs: []any{true, "true"},
},
{
name: "regexp-search",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{
"k8s.namespace.name": {
{
Name: "k8s.namespace.name",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"service.name": {
{
Name: "service.name",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
query: "k8s.namespace.name REGEXP 'test' OR service.name='redis'",
expectedSearchString: "WHERE (((match(attributes_string['k8s.namespace.name'], ?))) OR (resources_string['service.name'] = ?))",
expectedSearchArgs: []any{"test", "redis"},
},
}
for _, c := range cases {
chQuery, chQueryArgs, _, err := PrepareWhereClause(c.query, c.fieldKeys, telemetrytraces.NewConditionBuilder(), telemetrytypes.TelemetryFieldKey{
Name: "dummy",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
})
if err != nil {
t.Errorf("Error converting query to ClickHouse: %v", err)
}
if chQuery != c.expectedSearchString {
t.Errorf("Expected %s, got %s", c.expectedSearchString, chQuery)
}
if !reflect.DeepEqual(chQueryArgs, c.expectedSearchArgs) {
for i, arg := range chQueryArgs {
t.Logf("Expected %v with type %T, got %v with type %T\n", c.expectedSearchArgs[i], c.expectedSearchArgs[i], arg, arg)
}
t.Errorf("Expected %v, got %v", c.expectedSearchArgs, chQueryArgs)
}
}
}
func TestConvertToClickHouseSpansQueryWithErrors(t *testing.T) {
cases := []struct {
name string
fieldKeys map[string][]telemetrytypes.TelemetryFieldKey
query string
expectedSearchString string
expectedSearchArgs []any
expectedErrorSubString string
expectedWarnings []error
}{
{
name: "has-function-with-multiple-values",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{},
query: "key.that.does.not.exist = 'redis'",
expectedSearchString: "",
expectedSearchArgs: []any{},
expectedErrorSubString: "key `key.that.does.not.exist` not found",
expectedWarnings: []error{},
},
{
name: "unknown-function",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{},
query: "unknown.function()",
expectedSearchString: "",
expectedSearchArgs: []any{},
expectedErrorSubString: "expecting {'(', NOT, HAS, HASANY, HASALL, QUOTED_TEXT, KEY, FREETEXT}",
expectedWarnings: []error{},
},
{
name: "has-function-not-enough-params",
fieldKeys: map[string][]telemetrytypes.TelemetryFieldKey{},
query: "has(key.that.does.not.exist)",
expectedSearchString: "",
expectedSearchArgs: []any{},
expectedErrorSubString: "function `has` expects key and value parameters",
expectedWarnings: []error{},
},
}
for _, c := range cases {
_, _, warnings, err := PrepareWhereClause(c.query, c.fieldKeys, telemetrytraces.NewConditionBuilder(), telemetrytypes.TelemetryFieldKey{
Name: "dummy",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
})
if err != nil {
if !strings.Contains(err.Error(), c.expectedErrorSubString) {
t.Errorf("Expected error %v, got %v", c.expectedErrorSubString, err)
}
}
if len(warnings) != len(c.expectedWarnings) {
t.Errorf("Expected %d warnings, got %d", len(c.expectedWarnings), len(warnings))
}
for i, warning := range warnings {
if warning.Error() != c.expectedWarnings[i].Error() {
t.Errorf("Expected warning %d to be %v, got %v", i, c.expectedWarnings[i], warning)
}
}
}
}

View File

@@ -1085,7 +1085,7 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Con
item.Attributes_string[k] = fmt.Sprintf("%v", v)
}
for k, v := range item.Attributes_number {
item.Attributes_string[k] = strconv.FormatFloat(v, 'f', -1, 64)
item.Attributes_string[k] = fmt.Sprintf("%v", v)
}
for k, v := range item.Resources_string {
item.Attributes_string[k] = v
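// A small aside on the two formatting calls in the hunk above:
// strconv.FormatFloat with precision -1 keeps the shortest decimal form that
// round-trips, while fmt.Sprintf("%v") formats float64 with %g and can switch
// to scientific notation for large values. A minimal standalone sketch of the
// difference (values are hypothetical):
//
//	package main
//
//	import (
//		"fmt"
//		"strconv"
//	)
//
//	func main() {
//		v := 1234567890.0
//		fmt.Println(strconv.FormatFloat(v, 'f', -1, 64)) // 1234567890
//		fmt.Printf("%v\n", v)                            // 1.23456789e+09
//	}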
@@ -3928,16 +3928,11 @@ func (r *ClickHouseReader) GetLogAttributeKeys(ctx context.Context, req *v3.Filt
var rows driver.Rows
var response v3.FilterAttributeKeyResponse
tagTypeFilter := `tag_type != 'logfield'`
if req.TagType != "" {
tagTypeFilter = fmt.Sprintf(`tag_type != 'logfield' and tag_type = '%s'`, req.TagType)
}
if len(req.SearchText) != 0 {
query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where %s and tag_key ILIKE $1 limit $2", r.logsDB, r.logsTagAttributeTableV2, tagTypeFilter)
query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where tag_type != 'logfield' and tag_key ILIKE $1 limit $2", r.logsDB, r.logsTagAttributeTableV2)
rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", req.SearchText), req.Limit)
} else {
query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where %s limit $1", r.logsDB, r.logsTagAttributeTableV2, tagTypeFilter)
query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where tag_type != 'logfield' limit $1", r.logsDB, r.logsTagAttributeTableV2)
rows, err = r.db.Query(ctx, query, req.Limit)
}
@@ -3972,16 +3967,13 @@ func (r *ClickHouseReader) GetLogAttributeKeys(ctx context.Context, req *v3.Filt
response.AttributeKeys = append(response.AttributeKeys, key)
}
	// add other attributes only when the tagType is not specified,
	// i.e. retrieve all attributes
if req.TagType == "" {
for _, f := range constants.StaticFieldsLogsV3 {
if (v3.AttributeKey{} == f) {
continue
}
if len(req.SearchText) == 0 || strings.Contains(f.Key, req.SearchText) {
response.AttributeKeys = append(response.AttributeKeys, f)
}
// add other attributes
for _, f := range constants.StaticFieldsLogsV3 {
if (v3.AttributeKey{} == f) {
continue
}
if len(req.SearchText) == 0 || strings.Contains(f.Key, req.SearchText) {
response.AttributeKeys = append(response.AttributeKeys, f)
}
}
@@ -4241,12 +4233,11 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
isValidPoint = true
point.Value = float64(reflect.ValueOf(v).Elem().Float())
} else {
val := strconv.FormatFloat(reflect.ValueOf(v).Elem().Float(), 'f', -1, 64)
groupBy = append(groupBy, val)
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float()))
if _, ok := groupAttributes[colName]; !ok {
groupAttributesArray = append(groupAttributesArray, map[string]string{colName: val})
groupAttributesArray = append(groupAttributesArray, map[string]string{colName: fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float())})
}
groupAttributes[colName] = val
groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float())
}
case **float64, **float32:
val := reflect.ValueOf(v)
@@ -4256,12 +4247,11 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
isValidPoint = true
point.Value = value
} else {
val := strconv.FormatFloat(value, 'f', -1, 64)
groupBy = append(groupBy, val)
groupBy = append(groupBy, fmt.Sprintf("%v", value))
if _, ok := groupAttributes[colName]; !ok {
groupAttributesArray = append(groupAttributesArray, map[string]string{colName: val})
groupAttributesArray = append(groupAttributesArray, map[string]string{colName: fmt.Sprintf("%v", value)})
}
groupAttributes[colName] = val
groupAttributes[colName] = fmt.Sprintf("%v", value)
}
}
case *uint, *uint8, *uint64, *uint16, *uint32:
@@ -4725,12 +4715,7 @@ func (r *ClickHouseReader) GetTraceAttributeKeys(ctx context.Context, req *v3.Fi
var rows driver.Rows
var response v3.FilterAttributeKeyResponse
tagTypeFilter := `tag_type != 'spanfield'`
if req.TagType != "" {
tagTypeFilter = fmt.Sprintf(`tag_type != 'spanfield' and tag_type = '%s'`, req.TagType)
}
query = fmt.Sprintf("SELECT DISTINCT(tag_key), tag_type, tag_data_type FROM %s.%s WHERE tag_key ILIKE $1 and %s LIMIT $2", r.TraceDB, r.spanAttributeTableV2, tagTypeFilter)
query = fmt.Sprintf("SELECT DISTINCT(tag_key), tag_type, tag_data_type FROM %s.%s WHERE tag_key ILIKE $1 and tag_type != 'spanfield' LIMIT $2", r.TraceDB, r.spanAttributeTableV2)
rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", req.SearchText), req.Limit)
@@ -4775,16 +4760,13 @@ func (r *ClickHouseReader) GetTraceAttributeKeys(ctx context.Context, req *v3.Fi
fields = constants.DeprecatedStaticFieldsTraces
}
	// add the new static fields only when the tagType is not specified,
	// i.e. retrieve all attributes
if req.TagType == "" {
for _, f := range fields {
if (v3.AttributeKey{} == f) {
continue
}
if len(req.SearchText) == 0 || strings.Contains(f.Key, req.SearchText) {
response.AttributeKeys = append(response.AttributeKeys, f)
}
// add the new static fields
for _, f := range fields {
if (v3.AttributeKey{} == f) {
continue
}
if len(req.SearchText) == 0 || strings.Contains(f.Key, req.SearchText) {
response.AttributeKeys = append(response.AttributeKeys, f)
}
}
@@ -6891,7 +6873,7 @@ func (r *ClickHouseReader) SearchTracesV2(ctx context.Context, params *model.Sea
item.Attributes_string[k] = fmt.Sprintf("%v", v)
}
for k, v := range item.Attributes_number {
item.Attributes_string[k] = strconv.FormatFloat(v, 'f', -1, 64)
item.Attributes_string[k] = fmt.Sprintf("%v", v)
}
for k, v := range item.Resources_string {
item.Attributes_string[k] = v

View File

@@ -8,59 +8,68 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"github.com/jmoiron/sqlx"
)
type cloudProviderAccountsRepository interface {
listConnected(ctx context.Context, orgId string, provider string) ([]types.CloudIntegration, *model.ApiError)
listConnected(ctx context.Context, cloudProvider string) ([]AccountRecord, *model.ApiError)
get(ctx context.Context, orgId string, provider string, id string) (*types.CloudIntegration, *model.ApiError)
get(ctx context.Context, cloudProvider string, id string) (*AccountRecord, *model.ApiError)
getConnectedCloudAccount(ctx context.Context, orgId string, provider string, accountID string) (*types.CloudIntegration, *model.ApiError)
getConnectedCloudAccount(
ctx context.Context, cloudProvider string, cloudAccountId string,
) (*AccountRecord, *model.ApiError)
// Insert an account or update it by (cloudProvider, id)
// for specified non-empty fields
upsert(
ctx context.Context,
orgId string,
provider string,
cloudProvider string,
id *string,
config *types.AccountConfig,
accountId *string,
agentReport *types.AgentReport,
config *AccountConfig,
cloudAccountId *string,
agentReport *AgentReport,
removedAt *time.Time,
) (*types.CloudIntegration, *model.ApiError)
) (*AccountRecord, *model.ApiError)
}
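// A minimal sketch of how the controller later in this diff calls this
// repository (newer signature shown above, with orgId and the types.* structs;
// nil arguments leave the corresponding columns untouched on conflict):
//
//	account, apiErr := c.accountsRepo.upsert(
//		ctx, orgId, cloudProvider, req.AccountId, &req.AccountConfig, nil, nil, nil,
//	)
//	if apiErr != nil {
//		return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
//	}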
func newCloudProviderAccountsRepository(store sqlstore.SQLStore) (
func newCloudProviderAccountsRepository(db *sqlx.DB) (
*cloudProviderAccountsSQLRepository, error,
) {
return &cloudProviderAccountsSQLRepository{
store: store,
db: db,
}, nil
}
type cloudProviderAccountsSQLRepository struct {
store sqlstore.SQLStore
db *sqlx.DB
}
func (r *cloudProviderAccountsSQLRepository) listConnected(
ctx context.Context, orgId string, cloudProvider string,
) ([]types.CloudIntegration, *model.ApiError) {
accounts := []types.CloudIntegration{}
err := r.store.BunDB().NewSelect().
Model(&accounts).
Where("org_id = ?", orgId).
Where("provider = ?", cloudProvider).
Where("removed_at is NULL").
Where("account_id is not NULL").
Where("last_agent_report is not NULL").
Order("created_at").
Scan(ctx)
ctx context.Context, cloudProvider string,
) ([]AccountRecord, *model.ApiError) {
accounts := []AccountRecord{}
err := r.db.SelectContext(
ctx, &accounts, `
select
cloud_provider,
id,
config_json,
cloud_account_id,
last_agent_report_json,
created_at,
removed_at
from cloud_integrations_accounts
where
cloud_provider=$1
and removed_at is NULL
and cloud_account_id is not NULL
and last_agent_report_json is not NULL
order by created_at
`, cloudProvider,
)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not query connected cloud accounts: %w", err,
@@ -71,16 +80,27 @@ func (r *cloudProviderAccountsSQLRepository) listConnected(
}
func (r *cloudProviderAccountsSQLRepository) get(
ctx context.Context, orgId string, provider string, id string,
) (*types.CloudIntegration, *model.ApiError) {
var result types.CloudIntegration
ctx context.Context, cloudProvider string, id string,
) (*AccountRecord, *model.ApiError) {
var result AccountRecord
err := r.store.BunDB().NewSelect().
Model(&result).
Where("org_id = ?", orgId).
Where("provider = ?", provider).
Where("id = ?", id).
Scan(ctx)
err := r.db.GetContext(
ctx, &result, `
select
cloud_provider,
id,
config_json,
cloud_account_id,
last_agent_report_json,
created_at,
removed_at
from cloud_integrations_accounts
where
cloud_provider=$1
and id=$2
`,
cloudProvider, id,
)
if err == sql.ErrNoRows {
return nil, model.NotFoundError(fmt.Errorf(
@@ -96,22 +116,33 @@ func (r *cloudProviderAccountsSQLRepository) get(
}
func (r *cloudProviderAccountsSQLRepository) getConnectedCloudAccount(
ctx context.Context, orgId string, provider string, accountId string,
) (*types.CloudIntegration, *model.ApiError) {
var result types.CloudIntegration
ctx context.Context, cloudProvider string, cloudAccountId string,
) (*AccountRecord, *model.ApiError) {
var result AccountRecord
err := r.store.BunDB().NewSelect().
Model(&result).
Where("org_id = ?", orgId).
Where("provider = ?", provider).
Where("account_id = ?", accountId).
Where("last_agent_report is not NULL").
Where("removed_at is NULL").
Scan(ctx)
err := r.db.GetContext(
ctx, &result, `
select
cloud_provider,
id,
config_json,
cloud_account_id,
last_agent_report_json,
created_at,
removed_at
from cloud_integrations_accounts
where
cloud_provider=$1
and cloud_account_id=$2
and last_agent_report_json is not NULL
and removed_at is NULL
`,
cloudProvider, cloudAccountId,
)
if err == sql.ErrNoRows {
return nil, model.NotFoundError(fmt.Errorf(
"couldn't find connected cloud account %s", accountId,
"couldn't find connected cloud account %s", cloudAccountId,
))
} else if err != nil {
return nil, model.InternalError(fmt.Errorf(
@@ -124,18 +155,17 @@ func (r *cloudProviderAccountsSQLRepository) getConnectedCloudAccount(
func (r *cloudProviderAccountsSQLRepository) upsert(
ctx context.Context,
orgId string,
provider string,
cloudProvider string,
id *string,
config *types.AccountConfig,
accountId *string,
agentReport *types.AgentReport,
config *AccountConfig,
cloudAccountId *string,
agentReport *AgentReport,
removedAt *time.Time,
) (*types.CloudIntegration, *model.ApiError) {
) (*AccountRecord, *model.ApiError) {
// Insert
if id == nil {
temp := valuer.GenerateUUID().StringValue()
id = &temp
newId := uuid.NewString()
id = &newId
}
// Prepare clause for setting values in `on conflict do update`
@@ -146,19 +176,19 @@ func (r *cloudProviderAccountsSQLRepository) upsert(
if config != nil {
onConflictSetStmts = append(
onConflictSetStmts, setColStatement("config"),
onConflictSetStmts, setColStatement("config_json"),
)
}
if accountId != nil {
if cloudAccountId != nil {
onConflictSetStmts = append(
onConflictSetStmts, setColStatement("account_id"),
onConflictSetStmts, setColStatement("cloud_account_id"),
)
}
if agentReport != nil {
onConflictSetStmts = append(
onConflictSetStmts, setColStatement("last_agent_report"),
onConflictSetStmts, setColStatement("last_agent_report_json"),
)
}
@@ -168,45 +198,37 @@ func (r *cloudProviderAccountsSQLRepository) upsert(
)
}
// set updated_at to current timestamp if it's an upsert
onConflictSetStmts = append(
onConflictSetStmts, setColStatement("updated_at"),
)
onConflictClause := ""
if len(onConflictSetStmts) > 0 {
onConflictClause = fmt.Sprintf(
"conflict(id, provider, org_id) do update SET\n%s",
"on conflict(cloud_provider, id) do update SET\n%s",
strings.Join(onConflictSetStmts, ",\n"),
)
}
integration := types.CloudIntegration{
OrgID: orgId,
Provider: provider,
Identifiable: types.Identifiable{ID: valuer.MustNewUUID(*id)},
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
Config: config,
AccountID: accountId,
LastAgentReport: agentReport,
RemovedAt: removedAt,
}
_, dbErr := r.store.BunDB().NewInsert().
Model(&integration).
On(onConflictClause).
Exec(ctx)
insertQuery := fmt.Sprintf(`
INSERT INTO cloud_integrations_accounts (
cloud_provider,
id,
config_json,
cloud_account_id,
last_agent_report_json,
removed_at
) values ($1, $2, $3, $4, $5, $6)
%s`, onConflictClause,
)
_, dbErr := r.db.ExecContext(
ctx, insertQuery,
cloudProvider, id, config, cloudAccountId, agentReport, removedAt,
)
if dbErr != nil {
return nil, model.InternalError(fmt.Errorf(
"could not upsert cloud account record: %w", dbErr,
))
}
upsertedAccount, apiErr := r.get(ctx, orgId, provider, *id)
upsertedAccount, apiErr := r.get(ctx, cloudProvider, *id)
if apiErr != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't fetch upserted account by id: %w", apiErr.ToError(),

View File

@@ -8,6 +8,7 @@ import (
"strings"
"time"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
@@ -27,18 +28,18 @@ func validateCloudProviderName(name string) *model.ApiError {
type Controller struct {
accountsRepo cloudProviderAccountsRepository
serviceConfigRepo serviceConfigRepository
serviceConfigRepo ServiceConfigDatabase
}
func NewController(sqlStore sqlstore.SQLStore) (
*Controller, error,
) {
accountsRepo, err := newCloudProviderAccountsRepository(sqlStore)
accountsRepo, err := newCloudProviderAccountsRepository(sqlStore.SQLxDB())
if err != nil {
return nil, fmt.Errorf("couldn't create cloud provider accounts repo: %w", err)
}
serviceConfigRepo, err := newServiceConfigRepository(sqlStore)
serviceConfigRepo, err := newServiceConfigRepository(sqlStore.SQLxDB())
if err != nil {
return nil, fmt.Errorf("couldn't create cloud provider service config repo: %w", err)
}
@@ -49,12 +50,19 @@ func NewController(sqlStore sqlstore.SQLStore) (
}, nil
}
type Account struct {
Id string `json:"id"`
CloudAccountId string `json:"cloud_account_id"`
Config AccountConfig `json:"config"`
Status AccountStatus `json:"status"`
}
type ConnectedAccountsListResponse struct {
Accounts []types.Account `json:"accounts"`
Accounts []Account `json:"accounts"`
}
func (c *Controller) ListConnectedAccounts(
ctx context.Context, orgId string, cloudProvider string,
ctx context.Context, cloudProvider string,
) (
*ConnectedAccountsListResponse, *model.ApiError,
) {
@@ -62,14 +70,14 @@ func (c *Controller) ListConnectedAccounts(
return nil, apiErr
}
accountRecords, apiErr := c.accountsRepo.listConnected(ctx, orgId, cloudProvider)
accountRecords, apiErr := c.accountsRepo.listConnected(ctx, cloudProvider)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't list cloud accounts")
}
connectedAccounts := []types.Account{}
connectedAccounts := []Account{}
for _, a := range accountRecords {
connectedAccounts = append(connectedAccounts, a.Account())
connectedAccounts = append(connectedAccounts, a.account())
}
return &ConnectedAccountsListResponse{
@@ -81,7 +89,7 @@ type GenerateConnectionUrlRequest struct {
// Optional. To be specified for updates.
AccountId *string `json:"account_id,omitempty"`
AccountConfig types.AccountConfig `json:"account_config"`
AccountConfig AccountConfig `json:"account_config"`
AgentConfig SigNozAgentConfig `json:"agent_config"`
}
@@ -102,7 +110,7 @@ type GenerateConnectionUrlResponse struct {
}
func (c *Controller) GenerateConnectionUrl(
ctx context.Context, orgId string, cloudProvider string, req GenerateConnectionUrlRequest,
ctx context.Context, cloudProvider string, req GenerateConnectionUrlRequest,
) (*GenerateConnectionUrlResponse, *model.ApiError) {
// Account connection with a simple connection URL may not be available for all providers.
if cloudProvider != "aws" {
@@ -110,7 +118,7 @@ func (c *Controller) GenerateConnectionUrl(
}
account, apiErr := c.accountsRepo.upsert(
ctx, orgId, cloudProvider, req.AccountId, &req.AccountConfig, nil, nil, nil,
ctx, cloudProvider, req.AccountId, &req.AccountConfig, nil, nil, nil,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
@@ -128,7 +136,7 @@ func (c *Controller) GenerateConnectionUrl(
"param_SigNozIntegrationAgentVersion": agentVersion,
"param_SigNozApiUrl": req.AgentConfig.SigNozAPIUrl,
"param_SigNozApiKey": req.AgentConfig.SigNozAPIKey,
"param_SigNozAccountId": account.ID.StringValue(),
"param_SigNozAccountId": account.Id,
"param_IngestionUrl": req.AgentConfig.IngestionUrl,
"param_IngestionKey": req.AgentConfig.IngestionKey,
"stackName": "signoz-integration",
@@ -141,19 +149,19 @@ func (c *Controller) GenerateConnectionUrl(
}
return &GenerateConnectionUrlResponse{
AccountId: account.ID.StringValue(),
AccountId: account.Id,
ConnectionUrl: connectionUrl,
}, nil
}
type AccountStatusResponse struct {
Id string `json:"id"`
CloudAccountId *string `json:"cloud_account_id,omitempty"`
Status types.AccountStatus `json:"status"`
Id string `json:"id"`
CloudAccountId *string `json:"cloud_account_id,omitempty"`
Status AccountStatus `json:"status"`
}
func (c *Controller) GetAccountStatus(
ctx context.Context, orgId string, cloudProvider string, accountId string,
ctx context.Context, cloudProvider string, accountId string,
) (
*AccountStatusResponse, *model.ApiError,
) {
@@ -161,23 +169,23 @@ func (c *Controller) GetAccountStatus(
return nil, apiErr
}
account, apiErr := c.accountsRepo.get(ctx, orgId, cloudProvider, accountId)
account, apiErr := c.accountsRepo.get(ctx, cloudProvider, accountId)
if apiErr != nil {
return nil, apiErr
}
resp := AccountStatusResponse{
Id: account.ID.StringValue(),
CloudAccountId: account.AccountID,
Status: account.Status(),
Id: account.Id,
CloudAccountId: account.CloudAccountId,
Status: account.status(),
}
return &resp, nil
}
type AgentCheckInRequest struct {
ID string `json:"account_id"`
AccountID string `json:"cloud_account_id"`
AccountId string `json:"account_id"`
CloudAccountId string `json:"cloud_account_id"`
// Arbitrary cloud specific Agent data
Data map[string]any `json:"data,omitempty"`
}
@@ -193,46 +201,46 @@ type AgentCheckInResponse struct {
type IntegrationConfigForAgent struct {
EnabledRegions []string `json:"enabled_regions"`
TelemetryCollectionStrategy *CloudTelemetryCollectionStrategy `json:"telemetry,omitempty"`
TelemetryCollectionStrategy *CompiledCollectionStrategy `json:"telemetry,omitempty"`
}
func (c *Controller) CheckInAsAgent(
ctx context.Context, orgId string, cloudProvider string, req AgentCheckInRequest,
ctx context.Context, cloudProvider string, req AgentCheckInRequest,
) (*AgentCheckInResponse, *model.ApiError) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
existingAccount, apiErr := c.accountsRepo.get(ctx, orgId, cloudProvider, req.ID)
if existingAccount != nil && existingAccount.AccountID != nil && *existingAccount.AccountID != req.AccountID {
existingAccount, apiErr := c.accountsRepo.get(ctx, cloudProvider, req.AccountId)
if existingAccount != nil && existingAccount.CloudAccountId != nil && *existingAccount.CloudAccountId != req.CloudAccountId {
return nil, model.BadRequest(fmt.Errorf(
"can't check in with new %s account id %s for account %s with existing %s id %s",
cloudProvider, req.AccountID, existingAccount.ID.StringValue(), cloudProvider, *existingAccount.AccountID,
cloudProvider, req.CloudAccountId, existingAccount.Id, cloudProvider, *existingAccount.CloudAccountId,
))
}
existingAccount, apiErr = c.accountsRepo.getConnectedCloudAccount(ctx, orgId, cloudProvider, req.AccountID)
if existingAccount != nil && existingAccount.ID.StringValue() != req.ID {
existingAccount, apiErr = c.accountsRepo.getConnectedCloudAccount(ctx, cloudProvider, req.CloudAccountId)
if existingAccount != nil && existingAccount.Id != req.AccountId {
return nil, model.BadRequest(fmt.Errorf(
"can't check in to %s account %s with id %s. already connected with id %s",
cloudProvider, req.AccountID, req.ID, existingAccount.ID.StringValue(),
cloudProvider, req.CloudAccountId, req.AccountId, existingAccount.Id,
))
}
agentReport := types.AgentReport{
agentReport := AgentReport{
TimestampMillis: time.Now().UnixMilli(),
Data: req.Data,
}
account, apiErr := c.accountsRepo.upsert(
ctx, orgId, cloudProvider, &req.ID, nil, &req.AccountID, &agentReport, nil,
ctx, cloudProvider, &req.AccountId, nil, &req.CloudAccountId, &agentReport, nil,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
}
// prepare and return integration config to be consumed by agent
telemetryCollectionStrategy, err := NewCloudTelemetryCollectionStrategy(cloudProvider)
compliedStrategy, err := NewCompiledCollectionStrategy(cloudProvider)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't init telemetry collection strategy: %w", err,
@@ -241,24 +249,20 @@ func (c *Controller) CheckInAsAgent(
agentConfig := IntegrationConfigForAgent{
EnabledRegions: []string{},
TelemetryCollectionStrategy: telemetryCollectionStrategy,
TelemetryCollectionStrategy: compliedStrategy,
}
if account.Config != nil && account.Config.EnabledRegions != nil {
agentConfig.EnabledRegions = account.Config.EnabledRegions
}
services, apiErr := listCloudProviderServices(cloudProvider)
services, apiErr := services.Map(cloudProvider)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't list cloud services")
}
svcDetailsById := map[string]*CloudServiceDetails{}
for _, svcDetails := range services {
svcDetailsById[svcDetails.Id] = &svcDetails
}
svcConfigs, apiErr := c.serviceConfigRepo.getAllForAccount(
ctx, orgId, account.ID.StringValue(),
ctx, cloudProvider, *account.CloudAccountId,
)
if apiErr != nil {
return nil, model.WrapApiError(
@@ -271,75 +275,68 @@ func (c *Controller) CheckInAsAgent(
slices.Sort(configuredSvcIds)
for _, svcId := range configuredSvcIds {
svcDetails := svcDetailsById[svcId]
svcConfig := svcConfigs[svcId]
definition, ok := services[svcId]
if !ok {
continue
}
config := svcConfigs[svcId]
if svcDetails != nil {
metricsEnabled := svcConfig.Metrics != nil && svcConfig.Metrics.Enabled
logsEnabled := svcConfig.Logs != nil && svcConfig.Logs.Enabled
if logsEnabled || metricsEnabled {
err := agentConfig.TelemetryCollectionStrategy.AddServiceStrategy(
svcDetails.TelemetryCollectionStrategy, logsEnabled, metricsEnabled,
)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't add service telemetry collection strategy: %w", err,
))
}
}
err := AddServiceStrategy(compliedStrategy, definition.Strategy, config)
if err != nil {
return nil, model.InternalError(
fmt.Errorf("couldn't add service telemetry collection strategy: %s", err))
}
}
return &AgentCheckInResponse{
AccountId: account.ID.StringValue(),
CloudAccountId: *account.AccountID,
AccountId: account.Id,
CloudAccountId: *account.CloudAccountId,
RemovedAt: account.RemovedAt,
IntegrationConfig: agentConfig,
}, nil
}
type UpdateAccountConfigRequest struct {
Config types.AccountConfig `json:"config"`
Config AccountConfig `json:"config"`
}
func (c *Controller) UpdateAccountConfig(
ctx context.Context,
orgId string,
cloudProvider string,
accountId string,
req UpdateAccountConfigRequest,
) (*types.Account, *model.ApiError) {
) (*Account, *model.ApiError) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
accountRecord, apiErr := c.accountsRepo.upsert(
ctx, orgId, cloudProvider, &accountId, &req.Config, nil, nil, nil,
ctx, cloudProvider, &accountId, &req.Config, nil, nil, nil,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
}
account := accountRecord.Account()
account := accountRecord.account()
return &account, nil
}
func (c *Controller) DisconnectAccount(
ctx context.Context, orgId string, cloudProvider string, accountId string,
) (*types.CloudIntegration, *model.ApiError) {
ctx context.Context, cloudProvider string, accountId string,
) (*AccountRecord, *model.ApiError) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
account, apiErr := c.accountsRepo.get(ctx, orgId, cloudProvider, accountId)
account, apiErr := c.accountsRepo.get(ctx, cloudProvider, accountId)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't disconnect account")
}
tsNow := time.Now()
account, apiErr = c.accountsRepo.upsert(
ctx, orgId, cloudProvider, &accountId, nil, nil, nil, &tsNow,
ctx, cloudProvider, &accountId, nil, nil, nil, &tsNow,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't disconnect account")
@@ -349,35 +346,27 @@ func (c *Controller) DisconnectAccount(
}
type ListServicesResponse struct {
Services []CloudServiceSummary `json:"services"`
Services []ServiceSummary `json:"services"`
}
func (c *Controller) ListServices(
ctx context.Context,
orgID string,
cloudProvider string,
cloudAccountId *string,
) (*ListServicesResponse, *model.ApiError) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
services, apiErr := listCloudProviderServices(cloudProvider)
definitions, apiErr := services.List(cloudProvider)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't list cloud services")
}
svcConfigs := map[string]*types.CloudServiceConfig{}
svcConfigs := map[string]*ServiceConfig{}
if cloudAccountId != nil {
activeAccount, apiErr := c.accountsRepo.getConnectedCloudAccount(
ctx, orgID, cloudProvider, *cloudAccountId,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't get active account")
}
svcConfigs, apiErr = c.serviceConfigRepo.getAllForAccount(
ctx, orgID, activeAccount.ID.StringValue(),
ctx, cloudProvider, *cloudAccountId,
)
if apiErr != nil {
return nil, model.WrapApiError(
@@ -386,9 +375,11 @@ func (c *Controller) ListServices(
}
}
summaries := []CloudServiceSummary{}
for _, s := range services {
summary := s.CloudServiceSummary
summaries := []ServiceSummary{}
for _, def := range definitions {
summary := ServiceSummary{
Metadata: def.Metadata,
}
summary.Config = svcConfigs[summary.Id]
summaries = append(summaries, summary)
@@ -401,102 +392,88 @@ func (c *Controller) ListServices(
func (c *Controller) GetServiceDetails(
ctx context.Context,
orgID string,
cloudProvider string,
serviceId string,
cloudAccountId *string,
) (*CloudServiceDetails, *model.ApiError) {
) (*ServiceDetails, *model.ApiError) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
service, apiErr := getCloudProviderService(cloudProvider, serviceId)
definition, apiErr := services.GetServiceDefinition(cloudProvider, serviceId)
if apiErr != nil {
return nil, apiErr
}
details := ServiceDetails{
Definition: *definition,
}
if cloudAccountId != nil {
activeAccount, apiErr := c.accountsRepo.getConnectedCloudAccount(
ctx, orgID, cloudProvider, *cloudAccountId,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't get active account")
}
config, apiErr := c.serviceConfigRepo.get(
ctx, orgID, activeAccount.ID.StringValue(), serviceId,
ctx, cloudProvider, *cloudAccountId, serviceId,
)
if apiErr != nil && apiErr.Type() != model.ErrorNotFound {
return nil, model.WrapApiError(apiErr, "couldn't fetch service config")
}
if config != nil {
service.Config = config
details.Config = config
enabled := false
if config.Metrics != nil && config.Metrics.Enabled {
enabled = true
}
// add links to service dashboards, making them clickable.
for i, d := range service.Assets.Dashboards {
dashboardUuid := c.dashboardUuid(
cloudProvider, serviceId, d.Id,
)
if enabled {
service.Assets.Dashboards[i].Url = fmt.Sprintf(
// add links to service dashboards, making them clickable.
for i, d := range details.Assets.Dashboards {
dashboardUuid := c.dashboardUuid(
cloudProvider, serviceId, d.Id,
)
details.Assets.Dashboards[i].Url = fmt.Sprintf(
"/dashboard/%s", dashboardUuid,
)
} else {
service.Assets.Dashboards[i].Url = ""
}
}
}
}
return service, nil
return &details, nil
}
type UpdateServiceConfigRequest struct {
CloudAccountId string `json:"cloud_account_id"`
Config types.CloudServiceConfig `json:"config"`
CloudAccountId string `json:"cloud_account_id"`
Config ServiceConfig `json:"config"`
}
type UpdateServiceConfigResponse struct {
Id string `json:"id"`
Config types.CloudServiceConfig `json:"config"`
Id string `json:"id"`
Config ServiceConfig `json:"config"`
}
func (c *Controller) UpdateServiceConfig(
ctx context.Context,
orgID string,
cloudProvider string,
serviceId string,
req UpdateServiceConfigRequest,
) (*UpdateServiceConfigResponse, *model.ApiError) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
// can only update config for a connected cloud account id
_, apiErr := c.accountsRepo.getConnectedCloudAccount(
ctx, orgID, cloudProvider, req.CloudAccountId,
ctx, cloudProvider, req.CloudAccountId,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't find connected cloud account")
}
// can only update config for a valid service.
_, apiErr = getCloudProviderService(cloudProvider, serviceId)
_, apiErr = services.GetServiceDefinition(cloudProvider, serviceId)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "unsupported service")
}
updatedConfig, apiErr := c.serviceConfigRepo.upsert(
ctx, orgID, cloudProvider, req.CloudAccountId, serviceId, req.Config,
ctx, cloudProvider, req.CloudAccountId, serviceId, req.Config,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't update service config")
@@ -510,13 +487,13 @@ func (c *Controller) UpdateServiceConfig(
// All dashboards that are available based on cloud integrations configuration
// across all cloud providers
func (c *Controller) AvailableDashboards(ctx context.Context, orgId string) (
func (c *Controller) AvailableDashboards(ctx context.Context) (
[]types.Dashboard, *model.ApiError,
) {
allDashboards := []types.Dashboard{}
for _, provider := range []string{"aws"} {
providerDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, orgId, provider)
providerDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, provider)
if apiErr != nil {
return nil, model.WrapApiError(
apiErr, fmt.Sprintf("couldn't get available dashboards for %s", provider),
@@ -530,10 +507,10 @@ func (c *Controller) AvailableDashboards(ctx context.Context, orgId string) (
}
func (c *Controller) AvailableDashboardsForCloudProvider(
ctx context.Context, orgID string, cloudProvider string,
ctx context.Context, cloudProvider string,
) ([]types.Dashboard, *model.ApiError) {
accountRecords, apiErr := c.accountsRepo.listConnected(ctx, orgID, cloudProvider)
accountRecords, apiErr := c.accountsRepo.listConnected(ctx, cloudProvider)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't list connected cloud accounts")
}
@@ -542,9 +519,9 @@ func (c *Controller) AvailableDashboardsForCloudProvider(
servicesWithAvailableMetrics := map[string]*time.Time{}
for _, ar := range accountRecords {
if ar.AccountID != nil {
if ar.CloudAccountId != nil {
configsBySvcId, apiErr := c.serviceConfigRepo.getAllForAccount(
ctx, orgID, ar.ID.StringValue(),
ctx, cloudProvider, *ar.CloudAccountId,
)
if apiErr != nil {
return nil, apiErr
@@ -558,7 +535,7 @@ func (c *Controller) AvailableDashboardsForCloudProvider(
}
}
allServices, apiErr := listCloudProviderServices(cloudProvider)
allServices, apiErr := services.List(cloudProvider)
if apiErr != nil {
return nil, apiErr
}
@@ -592,7 +569,6 @@ func (c *Controller) AvailableDashboardsForCloudProvider(
}
func (c *Controller) GetDashboardById(
ctx context.Context,
orgId string,
dashboardUuid string,
) (*types.Dashboard, *model.ApiError) {
cloudProvider, _, _, apiErr := c.parseDashboardUuid(dashboardUuid)
@@ -600,7 +576,7 @@ func (c *Controller) GetDashboardById(
return nil, apiErr
}
allDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, orgId, cloudProvider)
allDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, cloudProvider)
if apiErr != nil {
return nil, model.WrapApiError(
apiErr, fmt.Sprintf("couldn't list available dashboards"),
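
For reference, a minimal sketch of the request body an agent now sends when checking in, based on the json tags of AgentCheckInRequest shown above; the ids and data values are made-up examples, and the struct is duplicated locally only so the sketch is self-contained.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors cloudintegrations.AgentCheckInRequest as defined in this diff.
type agentCheckInRequest struct {
	AccountId      string         `json:"account_id"`
	CloudAccountId string         `json:"cloud_account_id"`
	Data           map[string]any `json:"data,omitempty"` // arbitrary cloud specific agent data
}

func main() {
	body, err := json.Marshal(agentCheckInRequest{
		AccountId:      "f3a1c9d2-5b7e-4c1a-8f0d-2e6b9a4c7d10", // SigNoz-side account id (example)
		CloudAccountId: "546311234",                            // cloud account id (example)
		Data:           map[string]any{"agent_version": "0.0.1"},
	})
	if err != nil {
		panic(err)
	}
	// Prints the check-in payload with account_id and cloud_account_id keys.
	fmt.Println(string(body))
}
```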

View File

@@ -4,30 +4,23 @@ import (
"context"
"testing"
"github.com/SigNoz/signoz/pkg/query-service/auth"
"github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/dao"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/types"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
)
func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
require := require.New(t)
sqlStore := utils.NewQueryServiceDBForTests(t)
sqlStore, _ := utils.NewTestSqliteDB(t)
controller, err := NewController(sqlStore)
require.NoError(err)
user, apiErr := createTestUser()
require.Nil(apiErr)
// should be able to generate connection url for
// same account id again with updated config
testAccountConfig1 := types.AccountConfig{EnabledRegions: []string{"us-east-1", "us-west-1"}}
testAccountConfig1 := AccountConfig{EnabledRegions: []string{"us-east-1", "us-west-1"}}
resp1, apiErr := controller.GenerateConnectionUrl(
context.TODO(), user.OrgID, "aws", GenerateConnectionUrlRequest{
context.TODO(), "aws", GenerateConnectionUrlRequest{
AccountConfig: testAccountConfig1,
AgentConfig: SigNozAgentConfig{Region: "us-east-2"},
},
@@ -38,14 +31,14 @@ func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
testAccountId := resp1.AccountId
account, apiErr := controller.accountsRepo.get(
context.TODO(), user.OrgID, "aws", testAccountId,
context.TODO(), "aws", testAccountId,
)
require.Nil(apiErr)
require.Equal(testAccountConfig1, *account.Config)
testAccountConfig2 := types.AccountConfig{EnabledRegions: []string{"us-east-2", "us-west-2"}}
testAccountConfig2 := AccountConfig{EnabledRegions: []string{"us-east-2", "us-west-2"}}
resp2, apiErr := controller.GenerateConnectionUrl(
context.TODO(), user.OrgID, "aws", GenerateConnectionUrlRequest{
context.TODO(), "aws", GenerateConnectionUrlRequest{
AccountId: &testAccountId,
AccountConfig: testAccountConfig2,
AgentConfig: SigNozAgentConfig{Region: "us-east-2"},
@@ -55,7 +48,7 @@ func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
require.Equal(testAccountId, resp2.AccountId)
account, apiErr = controller.accountsRepo.get(
context.TODO(), user.OrgID, "aws", testAccountId,
context.TODO(), "aws", testAccountId,
)
require.Nil(apiErr)
require.Equal(testAccountConfig2, *account.Config)
@@ -63,21 +56,18 @@ func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
func TestAgentCheckIns(t *testing.T) {
require := require.New(t)
sqlStore := utils.NewQueryServiceDBForTests(t)
sqlStore, _ := utils.NewTestSqliteDB(t)
controller, err := NewController(sqlStore)
require.NoError(err)
user, apiErr := createTestUser()
require.Nil(apiErr)
// An agent should be able to check in from a cloud account even
// if no connection url was requested (no account with agent's account id exists)
testAccountId1 := uuid.NewString()
testCloudAccountId1 := "546311234"
resp1, apiErr := controller.CheckInAsAgent(
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
ID: testAccountId1,
AccountID: testCloudAccountId1,
context.TODO(), "aws", AgentCheckInRequest{
AccountId: testAccountId1,
CloudAccountId: testCloudAccountId1,
},
)
require.Nil(apiErr)
@@ -88,9 +78,9 @@ func TestAgentCheckIns(t *testing.T) {
// cloud account id for the same account.
testCloudAccountId2 := "99999999"
_, apiErr = controller.CheckInAsAgent(
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
ID: testAccountId1,
AccountID: testCloudAccountId2,
context.TODO(), "aws", AgentCheckInRequest{
AccountId: testAccountId1,
CloudAccountId: testCloudAccountId2,
},
)
require.NotNil(apiErr)
@@ -100,18 +90,18 @@ func TestAgentCheckIns(t *testing.T) {
// i.e. there can't be 2 connected account records for the same cloud account id
// at any point in time.
existingConnected, apiErr := controller.accountsRepo.getConnectedCloudAccount(
context.TODO(), user.OrgID, "aws", testCloudAccountId1,
context.TODO(), "aws", testCloudAccountId1,
)
require.Nil(apiErr)
require.NotNil(existingConnected)
require.Equal(testCloudAccountId1, *existingConnected.AccountID)
require.Equal(testCloudAccountId1, *existingConnected.CloudAccountId)
require.Nil(existingConnected.RemovedAt)
testAccountId2 := uuid.NewString()
_, apiErr = controller.CheckInAsAgent(
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
ID: testAccountId2,
AccountID: testCloudAccountId1,
context.TODO(), "aws", AgentCheckInRequest{
AccountId: testAccountId2,
CloudAccountId: testCloudAccountId1,
},
)
require.NotNil(apiErr)
@@ -119,29 +109,29 @@ func TestAgentCheckIns(t *testing.T) {
// After disconnecting the existing account record, the agent should be able to
// connect again for that cloud account id
_, apiErr = controller.DisconnectAccount(
context.TODO(), user.OrgID, "aws", testAccountId1,
context.TODO(), "aws", testAccountId1,
)
existingConnected, apiErr = controller.accountsRepo.getConnectedCloudAccount(
context.TODO(), user.OrgID, "aws", testCloudAccountId1,
context.TODO(), "aws", testCloudAccountId1,
)
require.Nil(existingConnected)
require.NotNil(apiErr)
require.Equal(model.ErrorNotFound, apiErr.Type())
_, apiErr = controller.CheckInAsAgent(
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
ID: testAccountId2,
AccountID: testCloudAccountId1,
context.TODO(), "aws", AgentCheckInRequest{
AccountId: testAccountId2,
CloudAccountId: testCloudAccountId1,
},
)
require.Nil(apiErr)
// should be able to keep checking in
_, apiErr = controller.CheckInAsAgent(
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
ID: testAccountId2,
AccountID: testCloudAccountId1,
context.TODO(), "aws", AgentCheckInRequest{
AccountId: testAccountId2,
CloudAccountId: testCloudAccountId1,
},
)
require.Nil(apiErr)
@@ -149,16 +139,13 @@ func TestAgentCheckIns(t *testing.T) {
func TestCantDisconnectNonExistentAccount(t *testing.T) {
require := require.New(t)
sqlStore := utils.NewQueryServiceDBForTests(t)
sqlStore, _ := utils.NewTestSqliteDB(t)
controller, err := NewController(sqlStore)
require.NoError(err)
user, apiErr := createTestUser()
require.Nil(apiErr)
// Attempting to disconnect a non-existent account should return error
account, apiErr := controller.DisconnectAccount(
context.TODO(), user.OrgID, "aws", uuid.NewString(),
context.TODO(), "aws", uuid.NewString(),
)
require.NotNil(apiErr)
require.Equal(model.ErrorNotFound, apiErr.Type())
@@ -167,23 +154,15 @@ func TestCantDisconnectNonExistentAccount(t *testing.T) {
func TestConfigureService(t *testing.T) {
require := require.New(t)
sqlStore := utils.NewQueryServiceDBForTests(t)
sqlStore, _ := utils.NewTestSqliteDB(t)
controller, err := NewController(sqlStore)
require.NoError(err)
user, apiErr := createTestUser()
require.Nil(apiErr)
// create a connected account
testCloudAccountId := "546311234"
testConnectedAccount := makeTestConnectedAccount(t, user.OrgID, controller, testCloudAccountId)
require.Nil(testConnectedAccount.RemovedAt)
require.NotEmpty(testConnectedAccount.AccountID)
require.Equal(testCloudAccountId, *testConnectedAccount.AccountID)
// should start out without any service config
svcListResp, apiErr := controller.ListServices(
context.TODO(), user.OrgID, "aws", &testCloudAccountId,
context.TODO(), "aws", &testCloudAccountId,
)
require.Nil(apiErr)
@@ -191,20 +170,25 @@ func TestConfigureService(t *testing.T) {
require.Nil(svcListResp.Services[0].Config)
svcDetails, apiErr := controller.GetServiceDetails(
context.TODO(), user.OrgID, "aws", testSvcId, &testCloudAccountId,
context.TODO(), "aws", testSvcId, &testCloudAccountId,
)
require.Nil(apiErr)
require.Equal(testSvcId, svcDetails.Id)
require.Nil(svcDetails.Config)
// should be able to configure a service for a connected account
testSvcConfig := types.CloudServiceConfig{
Metrics: &types.CloudServiceMetricsConfig{
testConnectedAccount := makeTestConnectedAccount(t, controller, testCloudAccountId)
require.Nil(testConnectedAccount.RemovedAt)
require.NotNil(testConnectedAccount.CloudAccountId)
require.Equal(testCloudAccountId, *testConnectedAccount.CloudAccountId)
testSvcConfig := ServiceConfig{
Metrics: &MetricsConfig{
Enabled: true,
},
}
updateSvcConfigResp, apiErr := controller.UpdateServiceConfig(
context.TODO(), user.OrgID, "aws", testSvcId, UpdateServiceConfigRequest{
context.TODO(), "aws", testSvcId, UpdateServiceConfigRequest{
CloudAccountId: testCloudAccountId,
Config: testSvcConfig,
},
@@ -214,14 +198,14 @@ func TestConfigureService(t *testing.T) {
require.Equal(testSvcConfig, updateSvcConfigResp.Config)
svcDetails, apiErr = controller.GetServiceDetails(
context.TODO(), user.OrgID, "aws", testSvcId, &testCloudAccountId,
context.TODO(), "aws", testSvcId, &testCloudAccountId,
)
require.Nil(apiErr)
require.Equal(testSvcId, svcDetails.Id)
require.Equal(testSvcConfig, *svcDetails.Config)
svcListResp, apiErr = controller.ListServices(
context.TODO(), user.OrgID, "aws", &testCloudAccountId,
context.TODO(), "aws", &testCloudAccountId,
)
require.Nil(apiErr)
for _, svc := range svcListResp.Services {
@@ -232,12 +216,12 @@ func TestConfigureService(t *testing.T) {
// should not be able to configure service after cloud account has been disconnected
_, apiErr = controller.DisconnectAccount(
context.TODO(), user.OrgID, "aws", testConnectedAccount.ID.StringValue(),
context.TODO(), "aws", testConnectedAccount.Id,
)
require.Nil(apiErr)
_, apiErr = controller.UpdateServiceConfig(
context.TODO(), user.OrgID, "aws", testSvcId,
context.TODO(), "aws", testSvcId,
UpdateServiceConfigRequest{
CloudAccountId: testCloudAccountId,
Config: testSvcConfig,
@@ -247,7 +231,7 @@ func TestConfigureService(t *testing.T) {
// should not be able to configure a service for a cloud account id that is not connected yet
_, apiErr = controller.UpdateServiceConfig(
context.TODO(), user.OrgID, "aws", testSvcId,
context.TODO(), "aws", testSvcId,
UpdateServiceConfigRequest{
CloudAccountId: "9999999999",
Config: testSvcConfig,
@@ -257,7 +241,7 @@ func TestConfigureService(t *testing.T) {
// should not be able to set config for an unsupported service
_, apiErr = controller.UpdateServiceConfig(
context.TODO(), user.OrgID, "aws", "bad-service", UpdateServiceConfigRequest{
context.TODO(), "aws", "bad-service", UpdateServiceConfigRequest{
CloudAccountId: testCloudAccountId,
Config: testSvcConfig,
},
@@ -266,54 +250,22 @@ func TestConfigureService(t *testing.T) {
}
func makeTestConnectedAccount(t *testing.T, orgId string, controller *Controller, cloudAccountId string) *types.CloudIntegration {
func makeTestConnectedAccount(t *testing.T, controller *Controller, cloudAccountId string) *AccountRecord {
require := require.New(t)
// a check in from SigNoz agent creates or updates a connected account.
testAccountId := uuid.NewString()
resp, apiErr := controller.CheckInAsAgent(
context.TODO(), orgId, "aws", AgentCheckInRequest{
ID: testAccountId,
AccountID: cloudAccountId,
context.TODO(), "aws", AgentCheckInRequest{
AccountId: testAccountId,
CloudAccountId: cloudAccountId,
},
)
require.Nil(apiErr)
require.Equal(testAccountId, resp.AccountId)
require.Equal(cloudAccountId, resp.CloudAccountId)
acc, err := controller.accountsRepo.get(context.TODO(), orgId, "aws", resp.AccountId)
acc, err := controller.accountsRepo.get(context.TODO(), "aws", resp.AccountId)
require.Nil(err)
return acc
}
func createTestUser() (*types.User, *model.ApiError) {
// Create a test user for auth
ctx := context.Background()
org, apiErr := dao.DB().CreateOrg(ctx, &types.Organization{
Name: "test",
})
if apiErr != nil {
return nil, apiErr
}
group, apiErr := dao.DB().GetGroupByName(ctx, constants.AdminGroup)
if apiErr != nil {
return nil, apiErr
}
auth.InitAuthCache(ctx)
userId := uuid.NewString()
return dao.DB().CreateUser(
ctx,
&types.User{
ID: userId,
Name: "test",
Email: userId[:8] + "test@test.com",
Password: "test",
OrgID: org.ID,
GroupID: group.ID,
},
true,
)
}
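
A minimal sketch of a test against the reworked controller, assuming the helpers shown in this diff (utils.NewTestSqliteDB, NewController) keep these signatures and that the test sits in the cloudintegrations package.

```go
package cloudintegrations

import (
	"context"
	"testing"

	"github.com/SigNoz/signoz/pkg/query-service/utils"
	"github.com/stretchr/testify/require"
)

func TestListServicesWithoutConnectedAccount(t *testing.T) {
	require := require.New(t)

	sqlStore, _ := utils.NewTestSqliteDB(t)
	controller, err := NewController(sqlStore)
	require.NoError(err)

	// With no cloud account id supplied, every service should be listed
	// without a per-account config.
	resp, apiErr := controller.ListServices(context.TODO(), "aws", nil)
	require.Nil(apiErr)
	require.NotEmpty(resp.Services)
	for _, svc := range resp.Services {
		require.Nil(svc.Config)
	}
}
```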

View File

@@ -1,74 +1,176 @@
package cloudintegrations
import (
"database/sql/driver"
"encoding/json"
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
)
type CloudServiceSummary struct {
Id string `json:"id"`
Title string `json:"title"`
Icon string `json:"icon"`
type ServiceSummary struct {
services.Metadata
// Present only if the service has been configured in the
// context of a cloud provider account.
Config *types.CloudServiceConfig `json:"config,omitempty"`
Config *ServiceConfig `json:"config"`
}
type CloudServiceDetails struct {
CloudServiceSummary
type ServiceDetails struct {
services.Definition
Overview string `json:"overview"` // markdown
Assets CloudServiceAssets `json:"assets"`
SupportedSignals SupportedSignals `json:"supported_signals"`
DataCollected DataCollectedForService `json:"data_collected"`
ConnectionStatus *CloudServiceConnectionStatus `json:"status,omitempty"`
TelemetryCollectionStrategy *CloudTelemetryCollectionStrategy `json:"telemetry_collection_strategy"`
Config *ServiceConfig `json:"config"`
ConnectionStatus *ServiceConnectionStatus `json:"status,omitempty"`
}
type CloudServiceAssets struct {
Dashboards []CloudServiceDashboard `json:"dashboards"`
// Represents a cloud provider account for cloud integrations
type AccountRecord struct {
CloudProvider string `json:"cloud_provider" db:"cloud_provider"`
Id string `json:"id" db:"id"`
Config *AccountConfig `json:"config" db:"config_json"`
CloudAccountId *string `json:"cloud_account_id" db:"cloud_account_id"`
LastAgentReport *AgentReport `json:"last_agent_report" db:"last_agent_report_json"`
CreatedAt time.Time `json:"created_at" db:"created_at"`
RemovedAt *time.Time `json:"removed_at" db:"removed_at"`
}
type CloudServiceDashboard struct {
Id string `json:"id"`
Url string `json:"url"`
Title string `json:"title"`
Description string `json:"description"`
Image string `json:"image"`
Definition *types.DashboardData `json:"definition,omitempty"`
type AccountConfig struct {
EnabledRegions []string `json:"regions"`
}
type SupportedSignals struct {
Logs bool `json:"logs"`
Metrics bool `json:"metrics"`
func DefaultAccountConfig() AccountConfig {
return AccountConfig{
EnabledRegions: []string{},
}
}
type DataCollectedForService struct {
Logs []CollectedLogAttribute `json:"logs"`
Metrics []CollectedMetric `json:"metrics"`
// For serializing from db
func (c *AccountConfig) Scan(src any) error {
data, ok := src.([]byte)
if !ok {
return fmt.Errorf("tried to scan from %T instead of bytes", src)
}
return json.Unmarshal(data, &c)
}
type CollectedLogAttribute struct {
Name string `json:"name"`
Path string `json:"path"`
Type string `json:"type"`
// For serializing to db
func (c *AccountConfig) Value() (driver.Value, error) {
if c == nil {
return nil, nil
}
serialized, err := json.Marshal(c)
if err != nil {
return nil, fmt.Errorf(
"couldn't serialize cloud account config to JSON: %w", err,
)
}
return serialized, nil
}
type CollectedMetric struct {
Name string `json:"name"`
Type string `json:"type"`
Unit string `json:"unit"`
Description string `json:"description"`
type AgentReport struct {
TimestampMillis int64 `json:"timestamp_millis"`
Data map[string]any `json:"data"`
}
type CloudServiceConnectionStatus struct {
// For serializing from db
func (r *AgentReport) Scan(src any) error {
data, ok := src.([]byte)
if !ok {
return fmt.Errorf("tried to scan from %T instead of bytes", src)
}
return json.Unmarshal(data, &r)
}
// For serializing to db
func (r *AgentReport) Value() (driver.Value, error) {
if r == nil {
return nil, nil
}
serialized, err := json.Marshal(r)
if err != nil {
return nil, fmt.Errorf(
"couldn't serialize agent report to JSON: %w", err,
)
}
return serialized, nil
}
type AccountStatus struct {
Integration AccountIntegrationStatus `json:"integration"`
}
type AccountIntegrationStatus struct {
LastHeartbeatTsMillis *int64 `json:"last_heartbeat_ts_ms"`
}
func (a *AccountRecord) status() AccountStatus {
status := AccountStatus{}
if a.LastAgentReport != nil {
lastHeartbeat := a.LastAgentReport.TimestampMillis
status.Integration.LastHeartbeatTsMillis = &lastHeartbeat
}
return status
}
func (a *AccountRecord) account() Account {
ca := Account{Id: a.Id, Status: a.status()}
if a.CloudAccountId != nil {
ca.CloudAccountId = *a.CloudAccountId
}
if a.Config != nil {
ca.Config = *a.Config
} else {
ca.Config = DefaultAccountConfig()
}
return ca
}
type ServiceConfig struct {
Logs *LogsConfig `json:"logs,omitempty"`
Metrics *MetricsConfig `json:"metrics,omitempty"`
}
// Serialization from db
func (c *ServiceConfig) Scan(src any) error {
data, ok := src.([]byte)
if !ok {
return fmt.Errorf("tried to scan from %T instead of bytes", src)
}
return json.Unmarshal(data, &c)
}
// Serializing to db
func (c *ServiceConfig) Value() (driver.Value, error) {
if c == nil {
return nil, nil
}
serialized, err := json.Marshal(c)
if err != nil {
return nil, fmt.Errorf(
"couldn't serialize cloud service config to JSON: %w", err,
)
}
return serialized, nil
}
type LogsConfig struct {
Enabled bool `json:"enabled"`
S3Buckets map[string][]string `json:"s3_buckets,omitempty"`
}
type MetricsConfig struct {
Enabled bool `json:"enabled"`
}
type ServiceConnectionStatus struct {
Logs *SignalConnectionStatus `json:"logs"`
Metrics *SignalConnectionStatus `json:"metrics"`
}
@@ -78,23 +180,14 @@ type SignalConnectionStatus struct {
LastReceivedFrom string `json:"last_received_from"` // resource identifier
}
type CloudTelemetryCollectionStrategy struct {
Provider string `json:"provider"`
type CompiledCollectionStrategy = services.CollectionStrategy
AWSMetrics *AWSMetricsCollectionStrategy `json:"aws_metrics,omitempty"`
AWSLogs *AWSLogsCollectionStrategy `json:"aws_logs,omitempty"`
}
func NewCloudTelemetryCollectionStrategy(provider string) (*CloudTelemetryCollectionStrategy, error) {
func NewCompiledCollectionStrategy(provider string) (*CompiledCollectionStrategy, error) {
if provider == "aws" {
return &CloudTelemetryCollectionStrategy{
Provider: "aws",
AWSMetrics: &AWSMetricsCollectionStrategy{
CloudwatchMetricsStreamFilters: []CloudwatchMetricStreamFilter{},
},
AWSLogs: &AWSLogsCollectionStrategy{
CloudwatchLogsSubscriptions: []CloudwatchLogsSubscriptionConfig{},
},
return &CompiledCollectionStrategy{
Provider: "aws",
AWSMetrics: &services.AWSMetricsStrategy{},
AWSLogs: &services.AWSLogsStrategy{},
}, nil
}
@@ -102,24 +195,27 @@ func NewCloudTelemetryCollectionStrategy(provider string) (*CloudTelemetryCollec
}
// Helper for accumulating strategies for enabled services.
func (cs *CloudTelemetryCollectionStrategy) AddServiceStrategy(
svcStrategy *CloudTelemetryCollectionStrategy,
logsEnabled bool,
metricsEnabled bool,
) error {
if svcStrategy.Provider != cs.Provider {
func AddServiceStrategy(cs *CompiledCollectionStrategy,
definitionStrat *services.CollectionStrategy, config *ServiceConfig) error {
if definitionStrat.Provider != cs.Provider {
return fmt.Errorf(
"can't add %s service strategy to strategy for %s",
svcStrategy.Provider, cs.Provider,
definitionStrat.Provider, cs.Provider,
)
}
if cs.Provider == "aws" {
if logsEnabled {
cs.AWSLogs.AddServiceStrategy(svcStrategy.AWSLogs)
if config.Logs != nil && config.Logs.Enabled && definitionStrat.AWSLogs != nil {
cs.AWSLogs.Subscriptions = append(
cs.AWSLogs.Subscriptions,
definitionStrat.AWSLogs.Subscriptions...,
)
}
if metricsEnabled {
cs.AWSMetrics.AddServiceStrategy(svcStrategy.AWSMetrics)
if config.Metrics != nil && config.Metrics.Enabled && definitionStrat.AWSMetrics != nil {
cs.AWSMetrics.StreamFilters = append(
cs.AWSMetrics.StreamFilters,
definitionStrat.AWSMetrics.StreamFilters...,
)
}
return nil
}
@@ -127,57 +223,3 @@ func (cs *CloudTelemetryCollectionStrategy) AddServiceStrategy(
return fmt.Errorf("unsupported cloud provider: %s", cs.Provider)
}
type AWSMetricsCollectionStrategy struct {
// to be used as https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-metricstream.html#cfn-cloudwatch-metricstream-includefilters
CloudwatchMetricsStreamFilters []CloudwatchMetricStreamFilter `json:"cloudwatch_metric_stream_filters"`
}
type CloudwatchMetricStreamFilter struct {
// json tags here are in the shape expected by AWS API as detailed at
// https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-metricstream-metricstreamfilter.html
Namespace string `json:"Namespace"`
MetricNames []string `json:"MetricNames,omitempty"`
}
func (amc *AWSMetricsCollectionStrategy) AddServiceStrategy(
svcStrategy *AWSMetricsCollectionStrategy,
) error {
if svcStrategy == nil {
return nil
}
amc.CloudwatchMetricsStreamFilters = append(
amc.CloudwatchMetricsStreamFilters,
svcStrategy.CloudwatchMetricsStreamFilters...,
)
return nil
}
type AWSLogsCollectionStrategy struct {
CloudwatchLogsSubscriptions []CloudwatchLogsSubscriptionConfig `json:"cloudwatch_logs_subscriptions"`
}
type CloudwatchLogsSubscriptionConfig struct {
// subscribe to all logs groups with specified prefix.
// eg: `/aws/rds/`
LogGroupNamePrefix string `json:"log_group_name_prefix"`
// https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html
// "" implies no filtering is required.
FilterPattern string `json:"filter_pattern"`
}
func (alc *AWSLogsCollectionStrategy) AddServiceStrategy(
svcStrategy *AWSLogsCollectionStrategy,
) error {
if svcStrategy == nil {
return nil
}
alc.CloudwatchLogsSubscriptions = append(
alc.CloudwatchLogsSubscriptions,
svcStrategy.CloudwatchLogsSubscriptions...,
)
return nil
}
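
To illustrate how the compiled strategy above accumulates per-service settings, a small sketch (not part of this change) that enables only logs for one service definition; it assumes the definition was loaded from the aws catalog, so its Strategy.Provider is already "aws".

```go
package cloudintegrations

import (
	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
)

// compileLogsOnly merges a single service definition into a fresh compiled
// strategy with only the logs signal enabled.
func compileLogsOnly(def services.Definition) (*CompiledCollectionStrategy, error) {
	compiled, err := NewCompiledCollectionStrategy("aws")
	if err != nil {
		return nil, err
	}

	cfg := &ServiceConfig{Logs: &LogsConfig{Enabled: true}}

	// Appends the definition's cloudwatch log subscriptions to the compiled
	// strategy; metric stream filters stay untouched because metrics are not
	// enabled in cfg.
	if err := AddServiceStrategy(compiled, def.Strategy, cfg); err != nil {
		return nil, err
	}
	return compiled, nil
}
```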

View File

@@ -1,165 +0,0 @@
package cloudintegrations
import (
"context"
"database/sql"
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
)
type serviceConfigRepository interface {
get(
ctx context.Context,
orgID string,
cloudAccountId string,
serviceType string,
) (*types.CloudServiceConfig, *model.ApiError)
upsert(
ctx context.Context,
orgID string,
cloudProvider string,
cloudAccountId string,
serviceId string,
config types.CloudServiceConfig,
) (*types.CloudServiceConfig, *model.ApiError)
getAllForAccount(
ctx context.Context,
orgID string,
cloudAccountId string,
) (
configsBySvcId map[string]*types.CloudServiceConfig,
apiErr *model.ApiError,
)
}
func newServiceConfigRepository(store sqlstore.SQLStore) (
*serviceConfigSQLRepository, error,
) {
return &serviceConfigSQLRepository{
store: store,
}, nil
}
type serviceConfigSQLRepository struct {
store sqlstore.SQLStore
}
func (r *serviceConfigSQLRepository) get(
ctx context.Context,
orgID string,
cloudAccountId string,
serviceType string,
) (*types.CloudServiceConfig, *model.ApiError) {
var result types.CloudIntegrationService
err := r.store.BunDB().NewSelect().
Model(&result).
Join("JOIN cloud_integration ci ON ci.id = cis.cloud_integration_id").
Where("ci.org_id = ?", orgID).
Where("ci.id = ?", cloudAccountId).
Where("cis.type = ?", serviceType).
Scan(ctx)
if err == sql.ErrNoRows {
return nil, model.NotFoundError(fmt.Errorf(
"couldn't find config for cloud account %s",
cloudAccountId,
))
} else if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't query cloud service config: %w", err,
))
}
return &result.Config, nil
}
func (r *serviceConfigSQLRepository) upsert(
ctx context.Context,
orgID string,
cloudProvider string,
cloudAccountId string,
serviceId string,
config types.CloudServiceConfig,
) (*types.CloudServiceConfig, *model.ApiError) {
// get cloud integration id from account id
// if the account is not connected, we don't need to upsert the config
var cloudIntegrationId string
err := r.store.BunDB().NewSelect().
Model((*types.CloudIntegration)(nil)).
Column("id").
Where("provider = ?", cloudProvider).
Where("account_id = ?", cloudAccountId).
Where("org_id = ?", orgID).
Where("removed_at is NULL").
Where("last_agent_report is not NULL").
Scan(ctx, &cloudIntegrationId)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't query cloud integration id: %w", err,
))
}
serviceConfig := types.CloudIntegrationService{
Identifiable: types.Identifiable{ID: valuer.GenerateUUID()},
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
Config: config,
Type: serviceId,
CloudIntegrationID: cloudIntegrationId,
}
_, err = r.store.BunDB().NewInsert().
Model(&serviceConfig).
On("conflict(cloud_integration_id, type) do update set config=excluded.config, updated_at=excluded.updated_at").
Exec(ctx)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not upsert cloud service config: %w", err,
))
}
return &serviceConfig.Config, nil
}
func (r *serviceConfigSQLRepository) getAllForAccount(
ctx context.Context,
orgID string,
cloudAccountId string,
) (map[string]*types.CloudServiceConfig, *model.ApiError) {
serviceConfigs := []types.CloudIntegrationService{}
err := r.store.BunDB().NewSelect().
Model(&serviceConfigs).
Join("JOIN cloud_integration ci ON ci.id = cis.cloud_integration_id").
Where("ci.id = ?", cloudAccountId).
Where("ci.org_id = ?", orgID).
Scan(ctx)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not query service configs from db: %w", err,
))
}
result := map[string]*types.CloudServiceConfig{}
for _, r := range serviceConfigs {
result[r.Type] = &r.Config
}
return result, nil
}

View File

@@ -0,0 +1,161 @@
package cloudintegrations
import (
"context"
"database/sql"
"fmt"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/jmoiron/sqlx"
)
type ServiceConfigDatabase interface {
get(
ctx context.Context,
cloudProvider string,
cloudAccountId string,
serviceId string,
) (*ServiceConfig, *model.ApiError)
upsert(
ctx context.Context,
cloudProvider string,
cloudAccountId string,
serviceId string,
config ServiceConfig,
) (*ServiceConfig, *model.ApiError)
getAllForAccount(
ctx context.Context,
cloudProvider string,
cloudAccountId string,
) (
configsBySvcId map[string]*ServiceConfig,
apiErr *model.ApiError,
)
}
func newServiceConfigRepository(db *sqlx.DB) (
*serviceConfigSQLRepository, error,
) {
return &serviceConfigSQLRepository{
db: db,
}, nil
}
type serviceConfigSQLRepository struct {
db *sqlx.DB
}
func (r *serviceConfigSQLRepository) get(
ctx context.Context,
cloudProvider string,
cloudAccountId string,
serviceId string,
) (*ServiceConfig, *model.ApiError) {
var result ServiceConfig
err := r.db.GetContext(
ctx, &result, `
select
config_json
from cloud_integrations_service_configs
where
cloud_provider=$1
and cloud_account_id=$2
and service_id=$3
`,
cloudProvider, cloudAccountId, serviceId,
)
if err != nil {
if err == sql.ErrNoRows {
return nil, model.NotFoundError(
fmt.Errorf("couldn't find %s %s config for %s", cloudProvider, serviceId, cloudAccountId))
}
return nil, model.InternalError(fmt.Errorf("couldn't query cloud service config: %w", err))
}
return &result, nil
}
func (r *serviceConfigSQLRepository) upsert(
ctx context.Context,
cloudProvider string,
cloudAccountId string,
serviceId string,
config ServiceConfig,
) (*ServiceConfig, *model.ApiError) {
query := `
INSERT INTO cloud_integrations_service_configs (
cloud_provider,
cloud_account_id,
service_id,
config_json
) values ($1, $2, $3, $4)
on conflict(cloud_provider, cloud_account_id, service_id)
do update set config_json=excluded.config_json
`
_, dbErr := r.db.ExecContext(
ctx, query,
cloudProvider, cloudAccountId, serviceId, &config,
)
if dbErr != nil {
return nil, model.InternalError(fmt.Errorf(
"could not upsert cloud service config: %w", dbErr,
))
}
upsertedConfig, apiErr := r.get(ctx, cloudProvider, cloudAccountId, serviceId)
if apiErr != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't fetch upserted service config: %w", apiErr.ToError(),
))
}
return upsertedConfig, nil
}
func (r *serviceConfigSQLRepository) getAllForAccount(
ctx context.Context,
cloudProvider string,
cloudAccountId string,
) (map[string]*ServiceConfig, *model.ApiError) {
type ScannedServiceConfigRecord struct {
ServiceId string `db:"service_id"`
Config ServiceConfig `db:"config_json"`
}
records := []ScannedServiceConfigRecord{}
err := r.db.SelectContext(
ctx, &records, `
select
service_id,
config_json
from cloud_integrations_service_configs
where
cloud_provider=$1
and cloud_account_id=$2
`,
cloudProvider, cloudAccountId,
)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not query service configs from db: %w", err,
))
}
result := map[string]*ServiceConfig{}
for _, r := range records {
result[r.ServiceId] = &r.Config
}
return result, nil
}
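
A sketch of how the new sqlx-backed repository above would be used from within the package; the bucket name and account id are example values, and the cloud_integrations_service_configs table is assumed to already exist.

```go
package cloudintegrations

import (
	"context"

	"github.com/jmoiron/sqlx"
)

// saveS3SyncConfig upserts a logs-only config for the s3sync service and reads
// it back, relying on the (cloud_provider, cloud_account_id, service_id)
// conflict target used by the upsert query above.
func saveS3SyncConfig(ctx context.Context, db *sqlx.DB) (*ServiceConfig, error) {
	repo, err := newServiceConfigRepository(db)
	if err != nil {
		return nil, err
	}

	cfg := ServiceConfig{
		Logs: &LogsConfig{
			Enabled:   true,
			S3Buckets: map[string][]string{"us-east-1": {"my-log-bucket"}}, // example region -> buckets
		},
	}

	if _, apiErr := repo.upsert(ctx, "aws", "546311234", "s3sync", cfg); apiErr != nil {
		return nil, apiErr.ToError()
	}

	saved, apiErr := repo.get(ctx, "aws", "546311234", "s3sync")
	if apiErr != nil {
		return nil, apiErr.ToError()
	}
	return saved, nil
}
```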

View File

@@ -0,0 +1 @@
<svg width="256" height="256" viewBox="0 0 256 256" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><title>AWS Simple Storage Service (S3)</title><defs><linearGradient x1="0%" y1="100%" x2="100%" y2="0%" id="a"><stop stop-color="#1B660F" offset="0%"/><stop stop-color="#6CAE3E" offset="100%"/></linearGradient></defs><g><rect fill="url(#a)" width="256" height="256"/><path d="M194.67488,137.25632 L195.90368,128.60352 C207.23488,135.39072 207.38208,138.19392 207.378964,138.27072 C207.35968,138.28672 205.42688,139.89952 194.67488,137.25632 L194.67488,137.25632 Z M188.45728,135.52832 C168.87328,129.60192 141.59968,117.08992 130.56288,111.87392 C130.56288,111.82912 130.57568,111.78752 130.57568,111.74272 C130.57568,107.50272 127.12608,104.05312 122.88288,104.05312 C118.64608,104.05312 115.19648,107.50272 115.19648,111.74272 C115.19648,115.98272 118.64608,119.43232 122.88288,119.43232 C124.74528,119.43232 126.43488,118.73792 127.76928,117.63392 C140.75488,123.78112 167.81728,136.11072 187.54528,141.93472 L179.74368,196.99392 C179.72128,197.14432 179.71168,197.29472 179.71168,197.44512 C179.71168,202.29312 158.24928,211.19872 123.18048,211.19872 C87.74048,211.19872 66.05088,202.29312 66.05088,197.44512 C66.05088,197.29792 66.04128,197.15392 66.02208,197.00992 L49.72128,77.94752 C63.83008,87.65952 94.17568,92.79872 123.19968,92.79872 C152.17888,92.79872 182.47328,87.67872 196.61088,77.99552 L188.45728,135.52832 Z M47.99968,65.52832 C48.23008,61.31712 72.42848,44.79872 123.19968,44.79872 C173.96448,44.79872 198.16608,61.31392 198.39968,65.52832 L198.39968,66.96512 C195.61568,76.40832 164.25568,86.39872 123.19968,86.39872 C82.07328,86.39872 50.69728,76.37632 47.99968,66.92032 L47.99968,65.52832 Z M204.79968,65.59872 C204.79968,54.51072 173.01088,38.39872 123.19968,38.39872 C73.38848,38.39872 41.59968,54.51072 41.59968,65.59872 L41.90048,68.01152 L59.65408,197.68832 C60.07968,212.19072 98.75488,217.59872 123.18048,217.59872 C153.49088,217.59872 185.69248,210.62912 186.10848,197.69792 L193.77568,143.62752 C198.04128,144.64832 201.55168,145.16992 204.37088,145.16992 C208.15648,145.16992 210.71648,144.24512 212.26848,142.39552 C213.54208,140.87872 214.02848,139.04192 213.66368,137.08672 C212.83488,132.65792 207.57728,127.88352 196.87008,121.77472 L204.47328,68.13632 L204.79968,65.59872 Z" fill="#FFFFFF"/></g></svg>


View File

@@ -0,0 +1,49 @@
{
"id": "s3sync",
"title": "S3 Sync",
"icon": "file://icon.svg",
"overview": "file://overview.md",
"supported_signals": {
"metrics": false,
"logs": true
},
"data_collected": {
"logs": [
{
"name": "Account ID",
"path": "resources.aws.account.id",
"type": "string"
},
{
"name": "Account Region",
"path": "resources.aws.account.region",
"type": "string"
},
{
"name": "Bucket Name",
"path": "resources.aws.bucket.name",
"type": "string"
},
{
"name": "Object Key",
"path": "attributes.aws.object.key",
"type": "string"
},
{
"name": "Object S3 Event Time",
"path": "attributes.aws.object.event_time",
"type": "string"
},
{
"name": "Log line number in source file",
"path": "attributes.log.source.line",
"type": "number"
}
]
},
"telemetry_collection_strategy": {
},
"assets": {
"dashboards": []
}
}
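
Once this definition is embedded, it becomes reachable through the services package by the id declared above; a short sketch of looking it up, with the expected values following from the JSON shown here.

```go
package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
)

func main() {
	def, apiErr := services.GetServiceDefinition("aws", "s3sync")
	if apiErr != nil {
		panic(apiErr.ToError())
	}

	fmt.Println(def.Title)                    // S3 Sync
	fmt.Println(def.SupportedSignals.Logs)    // true
	fmt.Println(def.SupportedSignals.Metrics) // false
	fmt.Println(len(def.DataCollected.Logs))  // 6 collected log attributes
}
```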

View File

@@ -0,0 +1,3 @@
### Sync logs stored in AWS S3 with SigNoz
Collect logs stored by AWS services in S3 and explore them in SigNoz.

View File

@@ -0,0 +1,94 @@
package services
import (
"github.com/SigNoz/signoz/pkg/types"
)
type Metadata struct {
Id string `json:"id"`
Title string `json:"title"`
Icon string `json:"icon"`
// Present only if the service has been configured in the
// context of a cloud provider account.
// Config *CloudServiceConfig `json:"config,omitempty"`
}
type Definition struct {
Metadata
Overview string `json:"overview"` // markdown
Assets Assets `json:"assets"`
SupportedSignals SupportedSignals `json:"supported_signals"`
DataCollected DataCollected `json:"data_collected"`
Strategy *CollectionStrategy `json:"telemetry_collection_strategy"`
}
type Assets struct {
Dashboards []Dashboard `json:"dashboards"`
}
type SupportedSignals struct {
Logs bool `json:"logs"`
Metrics bool `json:"metrics"`
}
type DataCollected struct {
Logs []CollectedLogAttribute `json:"logs"`
Metrics []CollectedMetric `json:"metrics"`
}
type CollectedLogAttribute struct {
Name string `json:"name"`
Path string `json:"path"`
Type string `json:"type"`
}
type CollectedMetric struct {
Name string `json:"name"`
Type string `json:"type"`
Unit string `json:"unit"`
Description string `json:"description"`
}
type CollectionStrategy struct {
Provider string `json:"provider"`
AWSMetrics *AWSMetricsStrategy `json:"aws_metrics,omitempty"`
AWSLogs *AWSLogsStrategy `json:"aws_logs,omitempty"`
}
type AWSMetricsStrategy struct {
// to be used as https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-metricstream.html#cfn-cloudwatch-metricstream-includefilters
StreamFilters []struct {
// json tags here are in the shape expected by AWS API as detailed at
// https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-metricstream-metricstreamfilter.html
Namespace string `json:"Namespace"`
MetricNames []string `json:"MetricNames,omitempty"`
} `json:"cloudwatch_metric_stream_filters"`
}
type AWSLogsStrategy struct {
Subscriptions []struct {
// subscribe to all logs groups with specified prefix.
// eg: `/aws/rds/`
LogGroupNamePrefix string `json:"log_group_name_prefix"`
// https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html
// "" implies no filtering is required.
FilterPattern string `json:"filter_pattern"`
} `json:"cloudwatch_logs_subscriptions"`
}
type Dashboard struct {
Id string `json:"id"`
Url string `json:"url"`
Title string `json:"title"`
Description string `json:"description"`
Image string `json:"image"`
Definition *types.DashboardData `json:"definition,omitempty"`
}
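
A small sketch of the wire shape AWSMetricsStrategy carries, using illustrative namespace and metric names; the PascalCase keys match what CloudWatch metric stream filters expect.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
)

func main() {
	// AWS/EC2 and CPUUtilization are illustrative values only.
	raw := []byte(`{
		"cloudwatch_metric_stream_filters": [
			{"Namespace": "AWS/EC2", "MetricNames": ["CPUUtilization"]}
		]
	}`)

	var strat services.AWSMetricsStrategy
	if err := json.Unmarshal(raw, &strat); err != nil {
		panic(err)
	}

	// Re-serializing the filters keeps the AWS-expected keys (Namespace,
	// MetricNames), ready for a metric stream's IncludeFilters.
	out, _ := json.Marshal(strat.StreamFilters)
	fmt.Println(string(out))
}
```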

View File

@@ -1,4 +1,4 @@
package cloudintegrations
package services
import (
"bytes"
@@ -15,11 +15,11 @@ import (
"golang.org/x/exp/maps"
)
func listCloudProviderServices(
func List(
cloudProvider string,
) ([]CloudServiceDetails, *model.ApiError) {
cloudServices := availableServices[cloudProvider]
if cloudServices == nil {
) ([]Definition, *model.ApiError) {
cloudServices, found := supportedServices[cloudProvider]
if !found || cloudServices == nil {
return nil, model.NotFoundError(fmt.Errorf(
"unsupported cloud provider: %s", cloudProvider,
))
@@ -33,10 +33,21 @@ func listCloudProviderServices(
return services, nil
}
func getCloudProviderService(
func Map(cloudprovider string) (map[string]Definition, *model.ApiError) {
cloudServices, found := supportedServices[cloudprovider]
if !found || cloudServices == nil {
return nil, model.NotFoundError(fmt.Errorf(
"unsupported cloud provider: %s", cloudprovider,
))
}
return cloudServices, nil
}
func GetServiceDefinition(
cloudProvider string, serviceId string,
) (*CloudServiceDetails, *model.ApiError) {
cloudServices := availableServices[cloudProvider]
) (*Definition, *model.ApiError) {
cloudServices := supportedServices[cloudProvider]
if cloudServices == nil {
return nil, model.NotFoundError(fmt.Errorf(
"unsupported cloud provider: %s", cloudProvider,
@@ -57,7 +68,7 @@ func getCloudProviderService(
// Service details read from ./serviceDefinitions
// { "providerName": { "service_id": {...}} }
var availableServices map[string]map[string]CloudServiceDetails
var supportedServices map[string]map[string]Definition
func init() {
err := readAllServiceDefinitions()
@@ -68,15 +79,15 @@ func init() {
}
}
//go:embed serviceDefinitions/*
var serviceDefinitionFiles embed.FS
//go:embed definitions/*
var definitionFiles embed.FS
func readAllServiceDefinitions() error {
availableServices = map[string]map[string]CloudServiceDetails{}
supportedServices = map[string]map[string]Definition{}
rootDirName := "serviceDefinitions"
rootDirName := "definitions"
cloudProviderDirs, err := fs.ReadDir(serviceDefinitionFiles, rootDirName)
cloudProviderDirs, err := fs.ReadDir(definitionFiles, rootDirName)
if err != nil {
return fmt.Errorf("couldn't read dirs in %s: %w", rootDirName, err)
}
@@ -98,21 +109,21 @@ func readAllServiceDefinitions() error {
return fmt.Errorf("no %s services could be read", cloudProvider)
}
availableServices[cloudProvider] = cloudServices
supportedServices[cloudProvider] = cloudServices
}
return nil
}
func readServiceDefinitionsFromDir(cloudProvider string, cloudProviderDirPath string) (
map[string]CloudServiceDetails, error,
map[string]Definition, error,
) {
svcDefDirs, err := fs.ReadDir(serviceDefinitionFiles, cloudProviderDirPath)
svcDefDirs, err := fs.ReadDir(definitionFiles, cloudProviderDirPath)
if err != nil {
return nil, fmt.Errorf("couldn't list integrations dirs: %w", err)
}
svcDefs := map[string]CloudServiceDetails{}
svcDefs := map[string]Definition{}
for _, d := range svcDefDirs {
if !d.IsDir() {
@@ -137,10 +148,10 @@ func readServiceDefinitionsFromDir(cloudProvider string, cloudProviderDirPath st
return svcDefs, nil
}
func readServiceDefinition(cloudProvider string, svcDirpath string) (*CloudServiceDetails, error) {
func readServiceDefinition(cloudProvider string, svcDirpath string) (*Definition, error) {
integrationJsonPath := path.Join(svcDirpath, "integration.json")
serializedSpec, err := serviceDefinitionFiles.ReadFile(integrationJsonPath)
serializedSpec, err := definitionFiles.ReadFile(integrationJsonPath)
if err != nil {
return nil, fmt.Errorf(
"couldn't find integration.json in %s: %w",
@@ -157,7 +168,7 @@ func readServiceDefinition(cloudProvider string, svcDirpath string) (*CloudServi
}
hydrated, err := integrations.HydrateFileUris(
integrationSpec, serviceDefinitionFiles, svcDirpath,
integrationSpec, definitionFiles, svcDirpath,
)
if err != nil {
return nil, fmt.Errorf(
@@ -167,7 +178,7 @@ func readServiceDefinition(cloudProvider string, svcDirpath string) (*CloudServi
}
hydratedSpec := hydrated.(map[string]any)
serviceDef, err := ParseStructWithJsonTagsFromMap[CloudServiceDetails](hydratedSpec)
serviceDef, err := ParseStructWithJsonTagsFromMap[Definition](hydratedSpec)
if err != nil {
return nil, fmt.Errorf(
"couldn't parse hydrated JSON spec read from %s: %w",
@@ -180,13 +191,13 @@ func readServiceDefinition(cloudProvider string, svcDirpath string) (*CloudServi
return nil, fmt.Errorf("invalid service definition %s: %w", serviceDef.Id, err)
}
serviceDef.TelemetryCollectionStrategy.Provider = cloudProvider
serviceDef.Strategy.Provider = cloudProvider
return serviceDef, nil
}
func validateServiceDefinition(s *CloudServiceDetails) error {
func validateServiceDefinition(s *Definition) error {
// Validate dashboard data
seenDashboardIds := map[string]interface{}{}
for _, dd := range s.Assets.Dashboards {
@@ -196,7 +207,7 @@ func validateServiceDefinition(s *CloudServiceDetails) error {
seenDashboardIds[dd.Id] = nil
}
if s.TelemetryCollectionStrategy == nil {
if s.Strategy == nil {
return fmt.Errorf("telemetry_collection_strategy is required")
}
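
For orientation, the embedded layout the loader above expects, plus a sketch of listing the catalog; the layout comment is illustrative, and only aws/s3sync is added in this change.

```go
package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
)

// Expected embedded layout under the services package:
//
//	definitions/
//	  aws/
//	    s3sync/
//	      integration.json   // may reference icon.svg / overview.md via file:// URIs
//	      icon.svg
//	      overview.md
func main() {
	defs, apiErr := services.List("aws")
	if apiErr != nil {
		panic(apiErr.ToError())
	}
	for _, def := range defs {
		fmt.Println(def.Id, "-", def.Title)
	}

	// Unknown providers surface as not-found errors (model.ErrorNotFound).
	if _, apiErr := services.List("bad-cloud-provider"); apiErr != nil {
		fmt.Println(apiErr.ToError())
	}
}
```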

View File

@@ -1,4 +1,4 @@
package cloudintegrations
package services
import (
"testing"
@@ -11,22 +11,22 @@ func TestAvailableServices(t *testing.T) {
require := require.New(t)
// should be able to list available services.
_, apiErr := listCloudProviderServices("bad-cloud-provider")
_, apiErr := List("bad-cloud-provider")
require.NotNil(apiErr)
require.Equal(model.ErrorNotFound, apiErr.Type())
awsSvcs, apiErr := listCloudProviderServices("aws")
awsSvcs, apiErr := List("aws")
require.Nil(apiErr)
require.Greater(len(awsSvcs), 0)
// should be able to get details of a service
_, apiErr = getCloudProviderService(
_, apiErr = GetServiceDefinition(
"aws", "bad-service-id",
)
require.NotNil(apiErr)
require.Equal(model.ErrorNotFound, apiErr.Type())
svc, apiErr := getCloudProviderService(
svc, apiErr := GetServiceDefinition(
"aws", awsSvcs[0].Id,
)
require.Nil(apiErr)

View File

@@ -22,7 +22,7 @@ import (
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
"github.com/SigNoz/signoz/pkg/query-service/app/metricsexplorer"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -38,6 +38,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/app/dashboards"
"github.com/SigNoz/signoz/pkg/query-service/app/explorer"
"github.com/SigNoz/signoz/pkg/query-service/app/inframetrics"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
queues2 "github.com/SigNoz/signoz/pkg/query-service/app/integrations/messagingQueues/queues"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations/thirdPartyApi"
"github.com/SigNoz/signoz/pkg/query-service/app/logs"
@@ -1082,14 +1083,14 @@ func (aH *APIHandler) getDashboards(w http.ResponseWriter, r *http.Request) {
}
ic := aH.IntegrationsController
installedIntegrationDashboards, err := ic.GetDashboardsForInstalledIntegrations(r.Context(), claims.OrgID)
installedIntegrationDashboards, err := ic.GetDashboardsForInstalledIntegrations(r.Context())
if err != nil {
zap.L().Error("failed to get dashboards for installed integrations", zap.Error(err))
} else {
allDashboards = append(allDashboards, installedIntegrationDashboards...)
}
cloudIntegrationDashboards, err := aH.CloudIntegrationsController.AvailableDashboards(r.Context(), claims.OrgID)
cloudIntegrationDashboards, err := aH.CloudIntegrationsController.AvailableDashboards(r.Context())
if err != nil {
zap.L().Error("failed to get cloud dashboards", zap.Error(err))
} else {
@@ -1267,7 +1268,7 @@ func (aH *APIHandler) getDashboard(w http.ResponseWriter, r *http.Request) {
if aH.CloudIntegrationsController.IsCloudIntegrationDashboardUuid(uuid) {
dashboard, apiError = aH.CloudIntegrationsController.GetDashboardById(
r.Context(), claims.OrgID, uuid,
r.Context(), uuid,
)
if apiError != nil {
RespondError(w, apiError, nil)
@@ -1276,7 +1277,7 @@ func (aH *APIHandler) getDashboard(w http.ResponseWriter, r *http.Request) {
} else {
dashboard, apiError = aH.IntegrationsController.GetInstalledIntegrationDashboardById(
r.Context(), claims.OrgID, uuid,
r.Context(), uuid,
)
if apiError != nil {
RespondError(w, apiError, nil)
@@ -2207,11 +2208,6 @@ func (aH *APIHandler) editUser(w http.ResponseWriter, r *http.Request) {
old.ProfilePictureURL = update.ProfilePictureURL
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(old.Email)) {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user cannot be updated"))
return
}
_, apiErr = dao.DB().EditUser(ctx, &types.User{
ID: old.ID,
Name: old.Name,
@@ -2243,11 +2239,6 @@ func (aH *APIHandler) deleteUser(w http.ResponseWriter, r *http.Request) {
return
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(user.Email)) {
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user cannot be updated"))
return
}
if user == nil {
RespondError(w, &model.ApiError{
Typ: model.ErrorNotFound,
@@ -3507,14 +3498,9 @@ func (aH *APIHandler) ListIntegrations(
for k, values := range r.URL.Query() {
params[k] = values[0]
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
resp, apiErr := aH.IntegrationsController.ListIntegrations(
r.Context(), claims.OrgID, params,
r.Context(), params,
)
if apiErr != nil {
RespondError(w, apiErr, "Failed to fetch integrations")
@@ -3527,13 +3513,8 @@ func (aH *APIHandler) GetIntegration(
w http.ResponseWriter, r *http.Request,
) {
integrationId := mux.Vars(r)["integrationId"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
integration, apiErr := aH.IntegrationsController.GetIntegration(
r.Context(), claims.OrgID, integrationId,
r.Context(), integrationId,
)
if apiErr != nil {
RespondError(w, apiErr, "Failed to fetch integration details")
@@ -3547,13 +3528,8 @@ func (aH *APIHandler) GetIntegrationConnectionStatus(
w http.ResponseWriter, r *http.Request,
) {
integrationId := mux.Vars(r)["integrationId"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
isInstalled, apiErr := aH.IntegrationsController.IsIntegrationInstalled(
r.Context(), claims.OrgID, integrationId,
r.Context(), integrationId,
)
if apiErr != nil {
RespondError(w, apiErr, "failed to check if integration is installed")
@@ -3567,7 +3543,7 @@ func (aH *APIHandler) GetIntegrationConnectionStatus(
}
connectionTests, apiErr := aH.IntegrationsController.GetIntegrationConnectionTests(
r.Context(), claims.OrgID, integrationId,
r.Context(), integrationId,
)
if apiErr != nil {
RespondError(w, apiErr, "failed to fetch integration connection tests")
@@ -3766,14 +3742,8 @@ func (aH *APIHandler) InstallIntegration(
return
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
integration, apiErr := aH.IntegrationsController.Install(
r.Context(), claims.OrgID, &req,
r.Context(), &req,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
@@ -3794,13 +3764,7 @@ func (aH *APIHandler) UninstallIntegration(
return
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
apiErr := aH.IntegrationsController.Uninstall(r.Context(), claims.OrgID, &req)
apiErr := aH.IntegrationsController.Uninstall(r.Context(), &req)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
@@ -3856,14 +3820,8 @@ func (aH *APIHandler) CloudIntegrationsListConnectedAccounts(
) {
cloudProvider := mux.Vars(r)["cloudProvider"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
resp, apiErr := aH.CloudIntegrationsController.ListConnectedAccounts(
r.Context(), claims.OrgID, cloudProvider,
r.Context(), cloudProvider,
)
if apiErr != nil {
@@ -3884,14 +3842,8 @@ func (aH *APIHandler) CloudIntegrationsGenerateConnectionUrl(
return
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
result, apiErr := aH.CloudIntegrationsController.GenerateConnectionUrl(
r.Context(), claims.OrgID, cloudProvider, req,
r.Context(), cloudProvider, req,
)
if apiErr != nil {
@@ -3908,14 +3860,8 @@ func (aH *APIHandler) CloudIntegrationsGetAccountStatus(
cloudProvider := mux.Vars(r)["cloudProvider"]
accountId := mux.Vars(r)["accountId"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
resp, apiErr := aH.CloudIntegrationsController.GetAccountStatus(
r.Context(), claims.OrgID, cloudProvider, accountId,
r.Context(), cloudProvider, accountId,
)
if apiErr != nil {
@@ -3936,14 +3882,8 @@ func (aH *APIHandler) CloudIntegrationsAgentCheckIn(
return
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
result, apiErr := aH.CloudIntegrationsController.CheckInAsAgent(
r.Context(), claims.OrgID, cloudProvider, req,
r.Context(), cloudProvider, req,
)
if apiErr != nil {
@@ -3966,14 +3906,8 @@ func (aH *APIHandler) CloudIntegrationsUpdateAccountConfig(
return
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
result, apiErr := aH.CloudIntegrationsController.UpdateAccountConfig(
r.Context(), claims.OrgID, cloudProvider, accountId, req,
r.Context(), cloudProvider, accountId, req,
)
if apiErr != nil {
@@ -3990,14 +3924,8 @@ func (aH *APIHandler) CloudIntegrationsDisconnectAccount(
cloudProvider := mux.Vars(r)["cloudProvider"]
accountId := mux.Vars(r)["accountId"]
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
result, apiErr := aH.CloudIntegrationsController.DisconnectAccount(
r.Context(), claims.OrgID, cloudProvider, accountId,
r.Context(), cloudProvider, accountId,
)
if apiErr != nil {
@@ -4020,14 +3948,8 @@ func (aH *APIHandler) CloudIntegrationsListServices(
cloudAccountId = &cloudAccountIdQP
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
resp, apiErr := aH.CloudIntegrationsController.ListServices(
r.Context(), claims.OrgID, cloudProvider, cloudAccountId,
r.Context(), cloudProvider, cloudAccountId,
)
if apiErr != nil {
@@ -4050,14 +3972,8 @@ func (aH *APIHandler) CloudIntegrationsGetServiceDetails(
cloudAccountId = &cloudAccountIdQP
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
resp, apiErr := aH.CloudIntegrationsController.GetServiceDetails(
r.Context(), claims.OrgID, cloudProvider, serviceId, cloudAccountId,
r.Context(), cloudProvider, serviceId, cloudAccountId,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
@@ -4083,8 +3999,8 @@ func (aH *APIHandler) calculateCloudIntegrationServiceConnectionStatus(
ctx context.Context,
cloudProvider string,
cloudAccountId string,
svcDetails *cloudintegrations.CloudServiceDetails,
) (*cloudintegrations.CloudServiceConnectionStatus, *model.ApiError) {
svcDetails *cloudintegrations.ServiceDetails,
) (*cloudintegrations.ServiceConnectionStatus, *model.ApiError) {
if cloudProvider != "aws" {
// TODO(Raj): Make connection check generic for all providers in a follow up change
return nil, model.BadRequest(
@@ -4092,14 +4008,14 @@ func (aH *APIHandler) calculateCloudIntegrationServiceConnectionStatus(
)
}
telemetryCollectionStrategy := svcDetails.TelemetryCollectionStrategy
telemetryCollectionStrategy := svcDetails.Strategy
if telemetryCollectionStrategy == nil {
return nil, model.InternalError(fmt.Errorf(
"service doesn't have telemetry collection strategy: %s", svcDetails.Id,
))
}
result := &cloudintegrations.CloudServiceConnectionStatus{}
result := &cloudintegrations.ServiceConnectionStatus{}
errors := []*model.ApiError{}
var resultLock sync.Mutex
@@ -4159,10 +4075,10 @@ func (aH *APIHandler) calculateCloudIntegrationServiceConnectionStatus(
func (aH *APIHandler) calculateAWSIntegrationSvcMetricsConnectionStatus(
ctx context.Context,
cloudAccountId string,
strategy *cloudintegrations.AWSMetricsCollectionStrategy,
metricsCollectedBySvc []cloudintegrations.CollectedMetric,
strategy *services.AWSMetricsStrategy,
metricsCollectedBySvc []services.CollectedMetric,
) (*cloudintegrations.SignalConnectionStatus, *model.ApiError) {
if strategy == nil || len(strategy.CloudwatchMetricsStreamFilters) < 1 {
if strategy == nil || len(strategy.StreamFilters) < 1 {
return nil, nil
}
@@ -4171,7 +4087,7 @@ func (aH *APIHandler) calculateAWSIntegrationSvcMetricsConnectionStatus(
"cloud_account_id": cloudAccountId,
}
metricsNamespace := strategy.CloudwatchMetricsStreamFilters[0].Namespace
metricsNamespace := strategy.StreamFilters[0].Namespace
metricsNamespaceParts := strings.Split(metricsNamespace, "/")
if len(metricsNamespaceParts) >= 2 {
@@ -4208,13 +4124,13 @@ func (aH *APIHandler) calculateAWSIntegrationSvcMetricsConnectionStatus(
func (aH *APIHandler) calculateAWSIntegrationSvcLogsConnectionStatus(
ctx context.Context,
cloudAccountId string,
strategy *cloudintegrations.AWSLogsCollectionStrategy,
strategy *services.AWSLogsStrategy,
) (*cloudintegrations.SignalConnectionStatus, *model.ApiError) {
if strategy == nil || len(strategy.CloudwatchLogsSubscriptions) < 1 {
if strategy == nil || len(strategy.Subscriptions) < 1 {
return nil, nil
}
logGroupNamePrefix := strategy.CloudwatchLogsSubscriptions[0].LogGroupNamePrefix
logGroupNamePrefix := strategy.Subscriptions[0].LogGroupNamePrefix
if len(logGroupNamePrefix) < 1 {
return nil, nil
}
@@ -4296,14 +4212,8 @@ func (aH *APIHandler) CloudIntegrationsUpdateServiceConfig(
return
}
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}
result, apiErr := aH.CloudIntegrationsController.UpdateServiceConfig(
r.Context(), claims.OrgID, cloudProvider, serviceId, req,
r.Context(), cloudProvider, serviceId, req,
)
if apiErr != nil {
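
Note: across the handler hunks above, the claims lookup is dropped and controller methods now receive only the request context plus route and query parameters. Below is a minimal, self-contained sketch of that handler shape; the controller type, route, and response encoding are stand-ins for illustration, not the real APIHandler or CloudIntegrationsController API.

package main

import (
	"context"
	"encoding/json"
	"net/http"

	"github.com/gorilla/mux"
)

// connectedAccountsController is a stand-in for the real controller: its
// method takes only the request context and the cloud provider from the route.
type connectedAccountsController struct{}

func (c *connectedAccountsController) ListConnectedAccounts(ctx context.Context, provider string) (map[string]any, error) {
	return map[string]any{"provider": provider, "accounts": []string{}}, nil
}

// listConnectedAccounts mirrors the simplified handler shape: no claims or
// org id lookup in the handler, just route params plus r.Context().
func listConnectedAccounts(ctrl *connectedAccountsController) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		provider := mux.Vars(r)["cloudProvider"] // e.g. "aws"
		resp, err := ctrl.ListConnectedAccounts(r.Context(), provider)
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		w.Header().Set("Content-Type", "application/json")
		_ = json.NewEncoder(w).Encode(resp)
	}
}

func main() {
	r := mux.NewRouter()
	r.HandleFunc("/api/v1/cloud-integrations/{cloudProvider}/accounts",
		listConnectedAccounts(&connectedAccountsController{}))
	_ = http.ListenAndServe(":8080", r)
}
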

View File

@@ -18,7 +18,7 @@ type Controller struct {
func NewController(sqlStore sqlstore.SQLStore) (
*Controller, error,
) {
mgr, err := NewManager(sqlStore)
mgr, err := NewManager(sqlStore.SQLxDB())
if err != nil {
return nil, fmt.Errorf("couldn't create integrations manager: %w", err)
}
@@ -35,7 +35,7 @@ type IntegrationsListResponse struct {
}
func (c *Controller) ListIntegrations(
ctx context.Context, orgId string, params map[string]string,
ctx context.Context, params map[string]string,
) (
*IntegrationsListResponse, *model.ApiError,
) {
@@ -47,7 +47,7 @@ func (c *Controller) ListIntegrations(
}
}
integrations, apiErr := c.mgr.ListIntegrations(ctx, orgId, filters)
integrations, apiErr := c.mgr.ListIntegrations(ctx, filters)
if apiErr != nil {
return nil, apiErr
}
@@ -58,15 +58,16 @@ func (c *Controller) ListIntegrations(
}
func (c *Controller) GetIntegration(
ctx context.Context, orgId string, integrationId string,
ctx context.Context, integrationId string,
) (*Integration, *model.ApiError) {
return c.mgr.GetIntegration(ctx, orgId, integrationId)
return c.mgr.GetIntegration(ctx, integrationId)
}
func (c *Controller) IsIntegrationInstalled(
ctx context.Context, orgId string, integrationId string,
ctx context.Context,
integrationId string,
) (bool, *model.ApiError) {
installation, apiErr := c.mgr.getInstalledIntegration(ctx, orgId, integrationId)
installation, apiErr := c.mgr.getInstalledIntegration(ctx, integrationId)
if apiErr != nil {
return false, apiErr
}
@@ -75,9 +76,9 @@ func (c *Controller) IsIntegrationInstalled(
}
func (c *Controller) GetIntegrationConnectionTests(
ctx context.Context, orgId string, integrationId string,
ctx context.Context, integrationId string,
) (*IntegrationConnectionTests, *model.ApiError) {
return c.mgr.GetIntegrationConnectionTests(ctx, orgId, integrationId)
return c.mgr.GetIntegrationConnectionTests(ctx, integrationId)
}
type InstallIntegrationRequest struct {
@@ -86,10 +87,10 @@ type InstallIntegrationRequest struct {
}
func (c *Controller) Install(
ctx context.Context, orgId string, req *InstallIntegrationRequest,
ctx context.Context, req *InstallIntegrationRequest,
) (*IntegrationsListItem, *model.ApiError) {
res, apiErr := c.mgr.InstallIntegration(
ctx, orgId, req.IntegrationId, req.Config,
ctx, req.IntegrationId, req.Config,
)
if apiErr != nil {
return nil, apiErr
@@ -103,7 +104,7 @@ type UninstallIntegrationRequest struct {
}
func (c *Controller) Uninstall(
ctx context.Context, orgId string, req *UninstallIntegrationRequest,
ctx context.Context, req *UninstallIntegrationRequest,
) *model.ApiError {
if len(req.IntegrationId) < 1 {
return model.BadRequest(fmt.Errorf(
@@ -112,7 +113,7 @@ func (c *Controller) Uninstall(
}
apiErr := c.mgr.UninstallIntegration(
ctx, orgId, req.IntegrationId,
ctx, req.IntegrationId,
)
if apiErr != nil {
return apiErr
@@ -122,19 +123,19 @@ func (c *Controller) Uninstall(
}
func (c *Controller) GetPipelinesForInstalledIntegrations(
ctx context.Context, orgId string,
ctx context.Context,
) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
return c.mgr.GetPipelinesForInstalledIntegrations(ctx, orgId)
return c.mgr.GetPipelinesForInstalledIntegrations(ctx)
}
func (c *Controller) GetDashboardsForInstalledIntegrations(
ctx context.Context, orgId string,
ctx context.Context,
) ([]types.Dashboard, *model.ApiError) {
return c.mgr.GetDashboardsForInstalledIntegrations(ctx, orgId)
return c.mgr.GetDashboardsForInstalledIntegrations(ctx)
}
func (c *Controller) GetInstalledIntegrationDashboardById(
ctx context.Context, orgId string, dashboardUuid string,
ctx context.Context, dashboardUuid string,
) (*types.Dashboard, *model.ApiError) {
return c.mgr.GetInstalledIntegrationDashboardById(ctx, orgId, dashboardUuid)
return c.mgr.GetInstalledIntegrationDashboardById(ctx, dashboardUuid)
}

View File

@@ -5,14 +5,15 @@ import (
"fmt"
"slices"
"strings"
"time"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/jmoiron/sqlx"
)
type IntegrationAuthor struct {
@@ -104,9 +105,16 @@ type IntegrationsListItem struct {
IsInstalled bool `json:"is_installed"`
}
type InstalledIntegration struct {
IntegrationId string `json:"integration_id" db:"integration_id"`
Config InstalledIntegrationConfig `json:"config_json" db:"config_json"`
InstalledAt time.Time `json:"installed_at" db:"installed_at"`
}
type InstalledIntegrationConfig map[string]interface{}
type Integration struct {
IntegrationDetails
Installation *types.InstalledIntegration `json:"installation"`
Installation *InstalledIntegration `json:"installation"`
}
type Manager struct {
@@ -114,8 +122,8 @@ type Manager struct {
installedIntegrationsRepo InstalledIntegrationsRepo
}
func NewManager(store sqlstore.SQLStore) (*Manager, error) {
iiRepo, err := NewInstalledIntegrationsSqliteRepo(store)
func NewManager(db *sqlx.DB) (*Manager, error) {
iiRepo, err := NewInstalledIntegrationsSqliteRepo(db)
if err != nil {
return nil, fmt.Errorf(
"could not init sqlite DB for installed integrations: %w", err,
@@ -134,7 +142,6 @@ type IntegrationsFilter struct {
func (m *Manager) ListIntegrations(
ctx context.Context,
orgId string,
filter *IntegrationsFilter,
// Expected to have pagination over time.
) ([]IntegrationsListItem, *model.ApiError) {
@@ -145,22 +152,22 @@ func (m *Manager) ListIntegrations(
)
}
installed, apiErr := m.installedIntegrationsRepo.list(ctx, orgId)
installed, apiErr := m.installedIntegrationsRepo.list(ctx)
if apiErr != nil {
return nil, model.WrapApiError(
apiErr, "could not fetch installed integrations",
)
}
installedTypes := []string{}
installedIds := []string{}
for _, ii := range installed {
installedTypes = append(installedTypes, ii.Type)
installedIds = append(installedIds, ii.IntegrationId)
}
result := []IntegrationsListItem{}
for _, ai := range available {
result = append(result, IntegrationsListItem{
IntegrationSummary: ai.IntegrationSummary,
IsInstalled: slices.Contains(installedTypes, ai.Id),
IsInstalled: slices.Contains(installedIds, ai.Id),
})
}
@@ -181,7 +188,6 @@ func (m *Manager) ListIntegrations(
func (m *Manager) GetIntegration(
ctx context.Context,
orgId string,
integrationId string,
) (*Integration, *model.ApiError) {
integrationDetails, apiErr := m.getIntegrationDetails(
@@ -192,7 +198,7 @@ func (m *Manager) GetIntegration(
}
installation, apiErr := m.getInstalledIntegration(
ctx, orgId, integrationId,
ctx, integrationId,
)
if apiErr != nil {
return nil, apiErr
@@ -206,7 +212,6 @@ func (m *Manager) GetIntegration(
func (m *Manager) GetIntegrationConnectionTests(
ctx context.Context,
orgId string,
integrationId string,
) (*IntegrationConnectionTests, *model.ApiError) {
integrationDetails, apiErr := m.getIntegrationDetails(
@@ -220,9 +225,8 @@ func (m *Manager) GetIntegrationConnectionTests(
func (m *Manager) InstallIntegration(
ctx context.Context,
orgId string,
integrationId string,
config types.InstalledIntegrationConfig,
config InstalledIntegrationConfig,
) (*IntegrationsListItem, *model.ApiError) {
integrationDetails, apiErr := m.getIntegrationDetails(ctx, integrationId)
if apiErr != nil {
@@ -230,7 +234,7 @@ func (m *Manager) InstallIntegration(
}
_, apiErr = m.installedIntegrationsRepo.upsert(
ctx, orgId, integrationId, config,
ctx, integrationId, config,
)
if apiErr != nil {
return nil, model.WrapApiError(
@@ -246,17 +250,15 @@ func (m *Manager) InstallIntegration(
func (m *Manager) UninstallIntegration(
ctx context.Context,
orgId string,
integrationId string,
) *model.ApiError {
return m.installedIntegrationsRepo.delete(ctx, orgId, integrationId)
return m.installedIntegrationsRepo.delete(ctx, integrationId)
}
func (m *Manager) GetPipelinesForInstalledIntegrations(
ctx context.Context,
orgId string,
) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
installedIntegrations, apiErr := m.getInstalledIntegrations(ctx, orgId)
installedIntegrations, apiErr := m.getInstalledIntegrations(ctx)
if apiErr != nil {
return nil, apiErr
}
@@ -306,7 +308,6 @@ func (m *Manager) parseDashboardUuid(dashboardUuid string) (
func (m *Manager) GetInstalledIntegrationDashboardById(
ctx context.Context,
orgId string,
dashboardUuid string,
) (*types.Dashboard, *model.ApiError) {
integrationId, dashboardId, apiErr := m.parseDashboardUuid(dashboardUuid)
@@ -314,7 +315,7 @@ func (m *Manager) GetInstalledIntegrationDashboardById(
return nil, apiErr
}
integration, apiErr := m.GetIntegration(ctx, orgId, integrationId)
integration, apiErr := m.GetIntegration(ctx, integrationId)
if apiErr != nil {
return nil, apiErr
}
@@ -354,9 +355,8 @@ func (m *Manager) GetInstalledIntegrationDashboardById(
func (m *Manager) GetDashboardsForInstalledIntegrations(
ctx context.Context,
orgId string,
) ([]types.Dashboard, *model.ApiError) {
installedIntegrations, apiErr := m.getInstalledIntegrations(ctx, orgId)
installedIntegrations, apiErr := m.getInstalledIntegrations(ctx)
if apiErr != nil {
return nil, apiErr
}
@@ -421,11 +421,10 @@ func (m *Manager) getIntegrationDetails(
func (m *Manager) getInstalledIntegration(
ctx context.Context,
orgId string,
integrationId string,
) (*types.InstalledIntegration, *model.ApiError) {
) (*InstalledIntegration, *model.ApiError) {
iis, apiErr := m.installedIntegrationsRepo.get(
ctx, orgId, []string{integrationId},
ctx, []string{integrationId},
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, fmt.Sprintf(
@@ -442,33 +441,32 @@ func (m *Manager) getInstalledIntegration(
func (m *Manager) getInstalledIntegrations(
ctx context.Context,
orgId string,
) (
map[string]Integration, *model.ApiError,
) {
installations, apiErr := m.installedIntegrationsRepo.list(ctx, orgId)
installations, apiErr := m.installedIntegrationsRepo.list(ctx)
if apiErr != nil {
return nil, apiErr
}
installedTypes := utils.MapSlice(installations, func(i types.InstalledIntegration) string {
return i.Type
installedIds := utils.MapSlice(installations, func(i InstalledIntegration) string {
return i.IntegrationId
})
integrationDetails, apiErr := m.availableIntegrationsRepo.get(ctx, installedTypes)
integrationDetails, apiErr := m.availableIntegrationsRepo.get(ctx, installedIds)
if apiErr != nil {
return nil, apiErr
}
result := map[string]Integration{}
for _, ii := range installations {
iDetails, exists := integrationDetails[ii.Type]
iDetails, exists := integrationDetails[ii.IntegrationId]
if !exists {
return nil, model.InternalError(fmt.Errorf(
"couldn't find integration details for %s", ii.Type,
"couldn't find integration details for %s", ii.IntegrationId,
))
}
result[ii.Type] = Integration{
result[ii.IntegrationId] = Integration{
Installation: &ii,
IntegrationDetails: iDetails,
}
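
Note: the ListIntegrations hunk above switches from matching on the installed integration's Type to matching on IntegrationId when flagging available integrations as installed. A small self-contained sketch of that merge, with simplified stand-ins for IntegrationsListItem and the installed id list:

package main

import (
	"fmt"
	"slices"
)

// listItem is a simplified stand-in for IntegrationsListItem.
type listItem struct {
	Id          string
	IsInstalled bool
}

// markInstalled annotates each available integration id with IsInstalled by
// checking membership in the ids read from the installed integrations repo,
// the same slices.Contains pattern used in the hunk above.
func markInstalled(availableIds []string, installedIds []string) []listItem {
	result := make([]listItem, 0, len(availableIds))
	for _, id := range availableIds {
		result = append(result, listItem{
			Id:          id,
			IsInstalled: slices.Contains(installedIds, id),
		})
	}
	return result
}

func main() {
	fmt.Println(markInstalled(
		[]string{"nginx", "postgres"},
		[]string{"postgres"},
	))
}
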

View File

@@ -14,23 +14,18 @@ func TestIntegrationLifecycle(t *testing.T) {
mgr := NewTestIntegrationsManager(t)
ctx := context.Background()
user, apiErr := createTestUser()
if apiErr != nil {
t.Fatalf("could not create test user: %v", apiErr)
}
ii := true
installedIntegrationsFilter := &IntegrationsFilter{
IsInstalled: &ii,
}
installedIntegrations, apiErr := mgr.ListIntegrations(
ctx, user.OrgID, installedIntegrationsFilter,
ctx, installedIntegrationsFilter,
)
require.Nil(apiErr)
require.Equal([]IntegrationsListItem{}, installedIntegrations)
availableIntegrations, apiErr := mgr.ListIntegrations(ctx, user.OrgID, nil)
availableIntegrations, apiErr := mgr.ListIntegrations(ctx, nil)
require.Nil(apiErr)
require.Equal(2, len(availableIntegrations))
require.False(availableIntegrations[0].IsInstalled)
@@ -38,44 +33,44 @@ func TestIntegrationLifecycle(t *testing.T) {
testIntegrationConfig := map[string]interface{}{}
installed, apiErr := mgr.InstallIntegration(
ctx, user.OrgID, availableIntegrations[1].Id, testIntegrationConfig,
ctx, availableIntegrations[1].Id, testIntegrationConfig,
)
require.Nil(apiErr)
require.Equal(installed.Id, availableIntegrations[1].Id)
integration, apiErr := mgr.GetIntegration(ctx, user.OrgID, availableIntegrations[1].Id)
integration, apiErr := mgr.GetIntegration(ctx, availableIntegrations[1].Id)
require.Nil(apiErr)
require.Equal(integration.Id, availableIntegrations[1].Id)
require.NotNil(integration.Installation)
installedIntegrations, apiErr = mgr.ListIntegrations(
ctx, user.OrgID, installedIntegrationsFilter,
ctx, installedIntegrationsFilter,
)
require.Nil(apiErr)
require.Equal(1, len(installedIntegrations))
require.Equal(availableIntegrations[1].Id, installedIntegrations[0].Id)
availableIntegrations, apiErr = mgr.ListIntegrations(ctx, user.OrgID, nil)
availableIntegrations, apiErr = mgr.ListIntegrations(ctx, nil)
require.Nil(apiErr)
require.Equal(2, len(availableIntegrations))
require.False(availableIntegrations[0].IsInstalled)
require.True(availableIntegrations[1].IsInstalled)
apiErr = mgr.UninstallIntegration(ctx, user.OrgID, installed.Id)
apiErr = mgr.UninstallIntegration(ctx, installed.Id)
require.Nil(apiErr)
integration, apiErr = mgr.GetIntegration(ctx, user.OrgID, availableIntegrations[1].Id)
integration, apiErr = mgr.GetIntegration(ctx, availableIntegrations[1].Id)
require.Nil(apiErr)
require.Equal(integration.Id, availableIntegrations[1].Id)
require.Nil(integration.Installation)
installedIntegrations, apiErr = mgr.ListIntegrations(
ctx, user.OrgID, installedIntegrationsFilter,
ctx, installedIntegrationsFilter,
)
require.Nil(apiErr)
require.Equal(0, len(installedIntegrations))
availableIntegrations, apiErr = mgr.ListIntegrations(ctx, user.OrgID, nil)
availableIntegrations, apiErr = mgr.ListIntegrations(ctx, nil)
require.Nil(apiErr)
require.Equal(2, len(availableIntegrations))
require.False(availableIntegrations[0].IsInstalled)

View File

@@ -2,33 +2,51 @@ package integrations
import (
"context"
"database/sql/driver"
"encoding/json"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
"github.com/pkg/errors"
)
// For serializing from db
func (c *InstalledIntegrationConfig) Scan(src interface{}) error {
if data, ok := src.([]byte); ok {
return json.Unmarshal(data, &c)
}
return nil
}
// For serializing to db
func (c *InstalledIntegrationConfig) Value() (driver.Value, error) {
filterSetJson, err := json.Marshal(c)
if err != nil {
return nil, errors.Wrap(err, "could not serialize integration config to JSON")
}
return filterSetJson, nil
}
type InstalledIntegrationsRepo interface {
list(ctx context.Context, orgId string) ([]types.InstalledIntegration, *model.ApiError)
list(context.Context) ([]InstalledIntegration, *model.ApiError)
get(
ctx context.Context, orgId string, integrationTypes []string,
) (map[string]types.InstalledIntegration, *model.ApiError)
ctx context.Context, integrationIds []string,
) (map[string]InstalledIntegration, *model.ApiError)
upsert(
ctx context.Context,
orgId string,
integrationType string,
config types.InstalledIntegrationConfig,
) (*types.InstalledIntegration, *model.ApiError)
integrationId string,
config InstalledIntegrationConfig,
) (*InstalledIntegration, *model.ApiError)
delete(ctx context.Context, orgId string, integrationType string) *model.ApiError
delete(ctx context.Context, integrationId string) *model.ApiError
}
type AvailableIntegrationsRepo interface {
list(context.Context) ([]IntegrationDetails, *model.ApiError)
get(
ctx context.Context, integrationTypes []string,
ctx context.Context, integrationIds []string,
) (map[string]IntegrationDetails, *model.ApiError)
// AvailableIntegrationsRepo implementations are expected to cache
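
Note: the Scan and Value methods added above let InstalledIntegrationConfig be stored as JSON text in the config_json column and rehydrated by database/sql without explicit marshalling at the call sites. A self-contained sketch of the same Scanner/Valuer pattern, exercised without a database; error wrapping is simplified relative to the original:

package main

import (
	"database/sql/driver"
	"encoding/json"
	"fmt"
)

// config is a map-backed type that database/sql (and sqlx) can read and write
// as a JSON text column, mirroring InstalledIntegrationConfig above.
type config map[string]any

// Scan implements sql.Scanner: called when reading the config_json column.
func (c *config) Scan(src interface{}) error {
	if data, ok := src.([]byte); ok {
		return json.Unmarshal(data, c)
	}
	return nil // mirror the original: silently ignore unexpected source types
}

// Value implements driver.Valuer: called when writing the column.
func (c *config) Value() (driver.Value, error) {
	return json.Marshal(c)
}

func main() {
	in := config{"enabled": true, "regions": []string{"us-east-1"}}
	v, _ := in.Value() // json.Marshal returns []byte, a valid driver.Value

	var out config
	_ = out.Scan(v.([]byte))
	fmt.Printf("%v\n", out)
}
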

View File

@@ -3,37 +3,39 @@ package integrations
import (
"context"
"fmt"
"strings"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
"github.com/jmoiron/sqlx"
)
type InstalledIntegrationsSqliteRepo struct {
store sqlstore.SQLStore
db *sqlx.DB
}
func NewInstalledIntegrationsSqliteRepo(store sqlstore.SQLStore) (
func NewInstalledIntegrationsSqliteRepo(db *sqlx.DB) (
*InstalledIntegrationsSqliteRepo, error,
) {
return &InstalledIntegrationsSqliteRepo{
store: store,
db: db,
}, nil
}
func (r *InstalledIntegrationsSqliteRepo) list(
ctx context.Context,
orgId string,
) ([]types.InstalledIntegration, *model.ApiError) {
integrations := []types.InstalledIntegration{}
) ([]InstalledIntegration, *model.ApiError) {
integrations := []InstalledIntegration{}
err := r.store.BunDB().NewSelect().
Model(&integrations).
Where("org_id = ?", orgId).
Order("installed_at").
Scan(ctx)
err := r.db.SelectContext(
ctx, &integrations, `
select
integration_id,
config_json,
installed_at
from integrations_installed
order by installed_at
`,
)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not query installed integrations: %w", err,
@@ -43,28 +45,38 @@ func (r *InstalledIntegrationsSqliteRepo) list(
}
func (r *InstalledIntegrationsSqliteRepo) get(
ctx context.Context, orgId string, integrationTypes []string,
) (map[string]types.InstalledIntegration, *model.ApiError) {
integrations := []types.InstalledIntegration{}
ctx context.Context, integrationIds []string,
) (map[string]InstalledIntegration, *model.ApiError) {
integrations := []InstalledIntegration{}
typeValues := []interface{}{}
for _, integrationType := range integrationTypes {
typeValues = append(typeValues, integrationType)
idPlaceholders := []string{}
idValues := []interface{}{}
for _, id := range integrationIds {
idPlaceholders = append(idPlaceholders, "?")
idValues = append(idValues, id)
}
err := r.store.BunDB().NewSelect().Model(&integrations).
Where("org_id = ?", orgId).
Where("type IN (?)", bun.In(typeValues)).
Scan(ctx)
err := r.db.SelectContext(
ctx, &integrations, fmt.Sprintf(`
select
integration_id,
config_json,
installed_at
from integrations_installed
where integration_id in (%s)`,
strings.Join(idPlaceholders, ", "),
),
idValues...,
)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not query installed integrations: %w", err,
))
}
result := map[string]types.InstalledIntegration{}
result := map[string]InstalledIntegration{}
for _, ii := range integrations {
result[ii.Type] = ii
result[ii.IntegrationId] = ii
}
return result, nil
@@ -72,57 +84,55 @@ func (r *InstalledIntegrationsSqliteRepo) get(
func (r *InstalledIntegrationsSqliteRepo) upsert(
ctx context.Context,
orgId string,
integrationType string,
config types.InstalledIntegrationConfig,
) (*types.InstalledIntegration, *model.ApiError) {
integration := types.InstalledIntegration{
Identifiable: types.Identifiable{
ID: valuer.GenerateUUID(),
},
OrgID: orgId,
Type: integrationType,
Config: config,
integrationId string,
config InstalledIntegrationConfig,
) (*InstalledIntegration, *model.ApiError) {
serializedConfig, err := config.Value()
if err != nil {
return nil, model.BadRequest(fmt.Errorf(
"could not serialize integration config: %w", err,
))
}
_, dbErr := r.store.BunDB().NewInsert().
Model(&integration).
On("conflict (type, org_id) DO UPDATE").
Set("config = EXCLUDED.config").
Exec(ctx)
_, dbErr := r.db.ExecContext(
ctx, `
INSERT INTO integrations_installed (
integration_id,
config_json
) values ($1, $2)
on conflict(integration_id) do update
set config_json=excluded.config_json
`, integrationId, serializedConfig,
)
if dbErr != nil {
return nil, model.InternalError(fmt.Errorf(
"could not insert record for integration installation: %w", dbErr,
))
}
res, apiErr := r.get(ctx, orgId, []string{integrationType})
res, apiErr := r.get(ctx, []string{integrationId})
if apiErr != nil || len(res) < 1 {
return nil, model.WrapApiError(
apiErr, "could not fetch installed integration",
)
}
installed := res[integrationType]
installed := res[integrationId]
return &installed, nil
}
func (r *InstalledIntegrationsSqliteRepo) delete(
ctx context.Context, orgId string, integrationType string,
ctx context.Context, integrationId string,
) *model.ApiError {
_, dbErr := r.store.BunDB().NewDelete().
Model(&types.InstalledIntegration{}).
Where("type = ?", integrationType).
Where("org_id = ?", orgId).
Exec(ctx)
_, dbErr := r.db.ExecContext(ctx, `
DELETE FROM integrations_installed where integration_id = ?
`, integrationId)
if dbErr != nil {
return model.InternalError(fmt.Errorf(
"could not delete installed integration record for %s: %w",
integrationType, dbErr,
integrationId, dbErr,
))
}

View File

@@ -5,22 +5,18 @@ import (
"slices"
"testing"
"github.com/SigNoz/signoz/pkg/query-service/auth"
"github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/dao"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/google/uuid"
)
func NewTestIntegrationsManager(t *testing.T) *Manager {
testDB := utils.NewQueryServiceDBForTests(t)
installedIntegrationsRepo, err := NewInstalledIntegrationsSqliteRepo(testDB)
installedIntegrationsRepo, err := NewInstalledIntegrationsSqliteRepo(testDB.SQLxDB())
if err != nil {
t.Fatalf("could not init sqlite DB for installed integrations: %v", err)
}
@@ -31,38 +27,6 @@ func NewTestIntegrationsManager(t *testing.T) *Manager {
}
}
func createTestUser() (*types.User, *model.ApiError) {
// Create a test user for auth
ctx := context.Background()
org, apiErr := dao.DB().CreateOrg(ctx, &types.Organization{
Name: "test",
})
if apiErr != nil {
return nil, apiErr
}
group, apiErr := dao.DB().GetGroupByName(ctx, constants.AdminGroup)
if apiErr != nil {
return nil, apiErr
}
auth.InitAuthCache(ctx)
userId := uuid.NewString()
return dao.DB().CreateUser(
ctx,
&types.User{
ID: userId,
Name: "test",
Email: userId[:8] + "test@test.com",
Password: "test",
OrgID: org.ID,
GroupID: group.ID,
},
true,
)
}
type TestAvailableIntegrationsRepo struct{}
func (t *TestAvailableIntegrationsRepo) list(

View File

@@ -25,12 +25,12 @@ import (
type LogParsingPipelineController struct {
Repo
GetIntegrationPipelines func(context.Context, string) ([]pipelinetypes.GettablePipeline, *model.ApiError)
GetIntegrationPipelines func(context.Context) ([]pipelinetypes.GettablePipeline, *model.ApiError)
}
func NewLogParsingPipelinesController(
sqlStore sqlstore.SQLStore,
getIntegrationPipelines func(context.Context, string) ([]pipelinetypes.GettablePipeline, *model.ApiError),
getIntegrationPipelines func(context.Context) ([]pipelinetypes.GettablePipeline, *model.ApiError),
) (*LogParsingPipelineController, error) {
repo := NewRepo(sqlStore)
return &LogParsingPipelineController{
@@ -164,7 +164,7 @@ func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
result = savedPipelines
}
integrationPipelines, apiErr := ic.GetIntegrationPipelines(ctx, defaultOrgID)
integrationPipelines, apiErr := ic.GetIntegrationPipelines(ctx)
if apiErr != nil {
return nil, model.WrapApiError(
apiErr, "could not get pipelines for installed integrations",

View File

@@ -131,11 +131,9 @@ func getOperators(ops []pipelinetypes.PipelineOperator) ([]pipelinetypes.Pipelin
)
}
operator.If = fmt.Sprintf(
`%s && (
(typeOf(%s) == "string" && %s matches "^\\s*{.*}\\s*$" ) ||
typeOf(%s) == "map[string]any"
)`, parseFromNotNilCheck, operator.ParseFrom, operator.ParseFrom, operator.ParseFrom,
`%s && %s matches "^\\s*{.*}\\s*$"`, parseFromNotNilCheck, operator.ParseFrom,
)
} else if operator.Type == "add" {
if strings.HasPrefix(operator.Value, "EXPR(") && strings.HasSuffix(operator.Value, ")") {
expression := strings.TrimSuffix(strings.TrimPrefix(operator.Value, "EXPR("), ")")
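
Note: the getOperators hunk above collapses the JSON-parse guard to a single regex match on ParseFrom. A tiny sketch of what that format string renders to, using a hypothetical ParseFrom of "body" and a placeholder nil check; the expression language that later evaluates operator.If is expected to unescape the quoted \\s back into the \s regex class.

package main

import "fmt"

func main() {
	// Hypothetical inputs: the real values come from the pipeline operator.
	parseFromNotNilCheck := `body != nil`
	parseFrom := `body`

	// Same format string as the simplified condition above.
	cond := fmt.Sprintf(
		`%s && %s matches "^\\s*{.*}\\s*$"`, parseFromNotNilCheck, parseFrom,
	)
	fmt.Println(cond)
	// Output: body != nil && body matches "^\\s*{.*}\\s*$"
}
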

View File

@@ -646,7 +646,7 @@ func TestMembershipOpInProcessorFieldExpressions(t *testing.T) {
require := require.New(t)
testLogs := []model.SignozLog{
makeTestSignozLog("test log", map[string]any{
makeTestSignozLog("test log", map[string]interface{}{
"http.method": "GET",
"order.products": `{"ids": ["pid0", "pid1"]}`,
}),

View File

@@ -719,21 +719,6 @@ func parseFilterAttributeKeyRequest(r *http.Request) (*v3.FilterAttributeKeyRequ
aggregateOperator := v3.AggregateOperator(r.URL.Query().Get("aggregateOperator"))
aggregateAttribute := r.URL.Query().Get("aggregateAttribute")
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
tagType := v3.TagType(r.URL.Query().Get("tagType"))
// empty string is a valid tagType
// i.e retrieve all attributes
if tagType != "" {
// what is happening here?
// if tagType is undefined(uh oh javascript) or any invalid value, set it to empty string
// instead of failing the request. Ideally, we should fail the request.
// but we are not doing that to maintain backward compatibility.
if err := tagType.Validate(); err != nil {
// if the tagType is invalid, set it to empty string
tagType = ""
}
}
if err != nil {
limit = 50
}
@@ -754,7 +739,6 @@ func parseFilterAttributeKeyRequest(r *http.Request) (*v3.FilterAttributeKeyRequ
AggregateAttribute: aggregateAttribute,
Limit: limit,
SearchText: r.URL.Query().Get("searchText"),
TagType: tagType,
}
return &req, nil
}
@@ -877,7 +861,7 @@ func chTransformQuery(query string, variables map[string]interface{}) {
transformer := chVariables.NewQueryTransformer(query, varsForTransform)
transformedQuery, err := transformer.Transform()
if err != nil {
zap.L().Warn("failed to transform clickhouse query", zap.String("query", query), zap.Error(err))
zap.L().Warn("failed to transform clickhouse query", zap.Error(err))
}
zap.L().Info("transformed clickhouse query", zap.String("transformedQuery", transformedQuery), zap.String("originalQuery", query))
}

View File

@@ -112,7 +112,6 @@ func TestParseFilterAttributeKeyRequest(t *testing.T) {
expectedSearchText string
expectErr bool
errMsg string
expectedTagType v3.TagType
}{
{
desc: "valid operator and data source",
@@ -169,38 +168,6 @@ func TestParseFilterAttributeKeyRequest(t *testing.T) {
expectedDataSource: v3.DataSourceTraces,
expectedLimit: 50,
},
{
desc: "invalid tag type",
queryString: "aggregateOperator=avg&dataSource=traces&tagType=invalid",
expectedOperator: v3.AggregateOperatorAvg,
expectedDataSource: v3.DataSourceTraces,
expectedTagType: "",
expectedLimit: 50,
},
{
desc: "valid tag type",
queryString: "aggregateOperator=avg&dataSource=traces&tagType=resource",
expectedOperator: v3.AggregateOperatorAvg,
expectedDataSource: v3.DataSourceTraces,
expectedTagType: v3.TagTypeResource,
expectedLimit: 50,
},
{
desc: "valid tag type",
queryString: "aggregateOperator=avg&dataSource=traces&tagType=scope",
expectedOperator: v3.AggregateOperatorAvg,
expectedDataSource: v3.DataSourceTraces,
expectedTagType: v3.TagTypeInstrumentationScope,
expectedLimit: 50,
},
{
desc: "valid tag type",
queryString: "aggregateOperator=avg&dataSource=traces&tagType=tag",
expectedOperator: v3.AggregateOperatorAvg,
expectedDataSource: v3.DataSourceTraces,
expectedTagType: v3.TagTypeTag,
expectedLimit: 50,
},
}
for _, reqCase := range reqCases {

View File

@@ -439,7 +439,7 @@ func RegisterFirstUser(ctx context.Context, req *RegisterRequest) (*types.User,
}
user := &types.User{
ID: uuid.New().String(),
ID: uuid.NewString(),
Name: req.Name,
Email: req.Email,
Password: hash,
@@ -519,7 +519,7 @@ func RegisterInvitedUser(ctx context.Context, req *RegisterRequest, nopassword b
}
user := &types.User{
ID: uuid.New().String(),
ID: uuid.NewString(),
Name: req.Name,
Email: req.Email,
Password: hash,

View File

@@ -3,7 +3,6 @@ package auth
import (
"context"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/dao"
"github.com/SigNoz/signoz/pkg/types"
@@ -52,7 +51,7 @@ func InitAuthCache(ctx context.Context) error {
func GetUserFromReqContext(ctx context.Context) (*types.GettableUser, error) {
claims, ok := authtypes.ClaimsFromContext(ctx)
if !ok {
return nil, errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "no claims found in context")
return nil, errors.New("no claims found in context")
}
user := &types.GettableUser{

View File

@@ -248,7 +248,6 @@ func (q TagType) Validate() error {
type FilterAttributeKeyRequest struct {
DataSource DataSource `json:"dataSource"`
AggregateOperator AggregateOperator `json:"aggregateOperator"`
TagType TagType `json:"tagType"`
AggregateAttribute string `json:"aggregateAttribute"`
SearchText string `json:"searchText"`
Limit int `json:"limit"`

View File

@@ -35,7 +35,7 @@ func TestAWSIntegrationAccountLifecycle(t *testing.T) {
)
// Should be able to generate a connection url from UI - initializing an integration account
testAccountConfig := types.AccountConfig{
testAccountConfig := cloudintegrations.AccountConfig{
EnabledRegions: []string{"us-east-1", "us-east-2"},
}
connectionUrlResp := testbed.GenerateConnectionUrlFromQS(
@@ -65,8 +65,8 @@ func TestAWSIntegrationAccountLifecycle(t *testing.T) {
testAWSAccountId := "4563215233"
agentCheckInResp := testbed.CheckInAsAgentWithQS(
"aws", cloudintegrations.AgentCheckInRequest{
ID: testAccountId,
AccountID: testAWSAccountId,
AccountId: testAccountId,
CloudAccountId: testAWSAccountId,
},
)
require.Equal(testAccountId, agentCheckInResp.AccountId)
@@ -91,20 +91,20 @@ func TestAWSIntegrationAccountLifecycle(t *testing.T) {
require.Equal(testAWSAccountId, accountsListResp2.Accounts[0].CloudAccountId)
// Should be able to update account config from UI
testAccountConfig2 := types.AccountConfig{
testAccountConfig2 := cloudintegrations.AccountConfig{
EnabledRegions: []string{"us-east-2", "us-west-1"},
}
latestAccount := testbed.UpdateAccountConfigWithQS(
"aws", testAccountId, testAccountConfig2,
)
require.Equal(testAccountId, latestAccount.ID.StringValue())
require.Equal(testAccountId, latestAccount.Id)
require.Equal(testAccountConfig2, *latestAccount.Config)
// The agent should now receive latest account config.
agentCheckInResp1 := testbed.CheckInAsAgentWithQS(
"aws", cloudintegrations.AgentCheckInRequest{
ID: testAccountId,
AccountID: testAWSAccountId,
AccountId: testAccountId,
CloudAccountId: testAWSAccountId,
},
)
require.Equal(testAccountId, agentCheckInResp1.AccountId)
@@ -114,14 +114,14 @@ func TestAWSIntegrationAccountLifecycle(t *testing.T) {
// Should be able to disconnect/remove account from UI.
tsBeforeDisconnect := time.Now()
latestAccount = testbed.DisconnectAccountWithQS("aws", testAccountId)
require.Equal(testAccountId, latestAccount.ID.StringValue())
require.Equal(testAccountId, latestAccount.Id)
require.LessOrEqual(tsBeforeDisconnect, *latestAccount.RemovedAt)
// The agent should receive the disconnected status in account config post disconnection
agentCheckInResp2 := testbed.CheckInAsAgentWithQS(
"aws", cloudintegrations.AgentCheckInRequest{
ID: testAccountId,
AccountID: testAWSAccountId,
AccountId: testAccountId,
CloudAccountId: testAWSAccountId,
},
)
require.Equal(testAccountId, agentCheckInResp2.AccountId)
@@ -157,13 +157,13 @@ func TestAWSIntegrationServices(t *testing.T) {
testAWSAccountId := "389389489489"
testbed.CheckInAsAgentWithQS(
"aws", cloudintegrations.AgentCheckInRequest{
ID: testAccountId,
AccountID: testAWSAccountId,
AccountId: testAccountId,
CloudAccountId: testAWSAccountId,
},
)
testSvcConfig := types.CloudServiceConfig{
Metrics: &types.CloudServiceMetricsConfig{
testSvcConfig := cloudintegrations.CloudServiceConfig{
Metrics: &cloudintegrations.CloudServiceMetricsConfig{
Enabled: true,
},
}
@@ -199,7 +199,7 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
testbed := NewCloudIntegrationsTestBed(t, nil)
// configure a connected account
testAccountConfig := types.AccountConfig{
testAccountConfig := cloudintegrations.AccountConfig{
EnabledRegions: []string{"us-east-1", "us-east-2"},
}
connectionUrlResp := testbed.GenerateConnectionUrlFromQS(
@@ -218,8 +218,8 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
testAWSAccountId := "389389489489"
checkinResp := testbed.CheckInAsAgentWithQS(
"aws", cloudintegrations.AgentCheckInRequest{
ID: testAccountId,
AccountID: testAWSAccountId,
AccountId: testAccountId,
CloudAccountId: testAWSAccountId,
},
)
@@ -237,14 +237,14 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
// helper
setServiceConfig := func(svcId string, metricsEnabled bool, logsEnabled bool) {
testSvcConfig := types.CloudServiceConfig{}
testSvcConfig := cloudintegrations.CloudServiceConfig{}
if metricsEnabled {
testSvcConfig.Metrics = &types.CloudServiceMetricsConfig{
testSvcConfig.Metrics = &cloudintegrations.CloudServiceMetricsConfig{
Enabled: metricsEnabled,
}
}
if logsEnabled {
testSvcConfig.Logs = &types.CloudServiceLogsConfig{
testSvcConfig.Logs = &cloudintegrations.CloudServiceLogsConfig{
Enabled: logsEnabled,
}
}
@@ -262,8 +262,8 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
checkinResp = testbed.CheckInAsAgentWithQS(
"aws", cloudintegrations.AgentCheckInRequest{
ID: testAccountId,
AccountID: testAWSAccountId,
AccountId: testAccountId,
CloudAccountId: testAWSAccountId,
},
)
@@ -292,13 +292,13 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
require.True(strings.HasPrefix(logGroupPrefixes[0], "/aws/rds"))
// change regions and update service configs and validate config changes for agent
testAccountConfig2 := types.AccountConfig{
testAccountConfig2 := cloudintegrations.AccountConfig{
EnabledRegions: []string{"us-east-2", "us-west-1"},
}
latestAccount := testbed.UpdateAccountConfigWithQS(
"aws", testAccountId, testAccountConfig2,
)
require.Equal(testAccountId, latestAccount.ID.StringValue())
require.Equal(testAccountId, latestAccount.Id)
require.Equal(testAccountConfig2, *latestAccount.Config)
// disable metrics for one and logs for the other.
@@ -308,8 +308,8 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
checkinResp = testbed.CheckInAsAgentWithQS(
"aws", cloudintegrations.AgentCheckInRequest{
ID: testAccountId,
AccountID: testAWSAccountId,
AccountId: testAccountId,
CloudAccountId: testAWSAccountId,
},
)
require.Equal(testAccountId, checkinResp.AccountId)
@@ -453,8 +453,8 @@ func (tb *CloudIntegrationsTestBed) CheckInAsAgentWithQS(
}
func (tb *CloudIntegrationsTestBed) UpdateAccountConfigWithQS(
cloudProvider string, accountId string, newConfig types.AccountConfig,
) *types.CloudIntegration {
cloudProvider string, accountId string, newConfig cloudintegrations.AccountConfig,
) *cloudintegrations.AccountRecord {
respDataJson := tb.RequestQS(
fmt.Sprintf(
"/api/v1/cloud-integrations/%s/accounts/%s/config",
@@ -464,7 +464,7 @@ func (tb *CloudIntegrationsTestBed) UpdateAccountConfigWithQS(
},
)
var resp types.CloudIntegration
var resp cloudintegrations.AccountRecord
err := json.Unmarshal(respDataJson, &resp)
if err != nil {
tb.t.Fatalf("could not unmarshal apiResponse.Data json into Account")
@@ -475,7 +475,7 @@ func (tb *CloudIntegrationsTestBed) UpdateAccountConfigWithQS(
func (tb *CloudIntegrationsTestBed) DisconnectAccountWithQS(
cloudProvider string, accountId string,
) *types.CloudIntegration {
) *cloudintegrations.AccountRecord {
respDataJson := tb.RequestQS(
fmt.Sprintf(
"/api/v1/cloud-integrations/%s/accounts/%s/disconnect",
@@ -483,7 +483,7 @@ func (tb *CloudIntegrationsTestBed) DisconnectAccountWithQS(
), map[string]any{},
)
var resp types.CloudIntegration
var resp cloudintegrations.AccountRecord
err := json.Unmarshal(respDataJson, &resp)
if err != nil {
tb.t.Fatalf("could not unmarshal apiResponse.Data json into Account")

View File

@@ -166,7 +166,6 @@ func createTestUser() (*types.User, *model.ApiError) {
auth.InitAuthCache(ctx)
userId := uuid.NewString()
return dao.DB().CreateUser(
ctx,
&types.User{

View File

@@ -48,15 +48,10 @@ func NewTestSqliteDB(t *testing.T) (sqlStore sqlstore.SQLStore, testDBFilePath s
sqlmigration.NewModifyDatetimeFactory(),
sqlmigration.NewModifyOrgDomainFactory(),
sqlmigration.NewUpdateOrganizationFactory(sqlStore),
sqlmigration.NewAddAlertmanagerFactory(sqlStore),
sqlmigration.NewUpdateDashboardAndSavedViewsFactory(sqlStore),
sqlmigration.NewUpdatePatAndOrgDomainsFactory(sqlStore),
sqlmigration.NewUpdatePipelines(sqlStore),
sqlmigration.NewDropLicensesSitesFactory(sqlStore),
sqlmigration.NewUpdateInvitesFactory(sqlStore),
sqlmigration.NewUpdatePatFactory(sqlStore),
sqlmigration.NewAddVirtualFieldsFactory(),
sqlmigration.NewUpdateIntegrationsFactory(sqlStore),
),
)
if err != nil {

View File

@@ -70,7 +70,6 @@ func NewSQLMigrationProviderFactories(sqlstore sqlstore.SQLStore) factory.NamedM
sqlmigration.NewUpdateApdexTtlFactory(sqlstore),
sqlmigration.NewUpdateResetPasswordFactory(sqlstore),
sqlmigration.NewAddVirtualFieldsFactory(),
sqlmigration.NewUpdateIntegrationsFactory(sqlstore),
)
}

View File

@@ -1,441 +0,0 @@
package sqlmigration
import (
"context"
"database/sql"
"time"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
"go.uber.org/zap"
)
type updateIntegrations struct {
store sqlstore.SQLStore
}
func NewUpdateIntegrationsFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(factory.MustNewName("update_integrations"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newUpdateIntegrations(ctx, ps, c, sqlstore)
})
}
func newUpdateIntegrations(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
return &updateIntegrations{
store: store,
}, nil
}
func (migration *updateIntegrations) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
type existingInstalledIntegration struct {
bun.BaseModel `bun:"table:integrations_installed"`
IntegrationID string `bun:"integration_id,pk,type:text"`
ConfigJSON string `bun:"config_json,type:text"`
InstalledAt time.Time `bun:"installed_at,default:current_timestamp"`
}
type newInstalledIntegration struct {
bun.BaseModel `bun:"table:installed_integration"`
types.Identifiable
Type string `json:"type" bun:"type,type:text,unique:org_id_type"`
Config string `json:"config" bun:"config,type:text"`
InstalledAt time.Time `json:"installed_at" bun:"installed_at,default:current_timestamp"`
OrgID string `json:"org_id" bun:"org_id,type:text,unique:org_id_type"`
}
type existingCloudIntegration struct {
bun.BaseModel `bun:"table:cloud_integrations_accounts"`
CloudProvider string `bun:"cloud_provider,type:text,unique:cloud_provider_id"`
ID string `bun:"id,type:text,notnull,unique:cloud_provider_id"`
ConfigJSON string `bun:"config_json,type:text"`
CloudAccountID string `bun:"cloud_account_id,type:text"`
LastAgentReportJSON string `bun:"last_agent_report_json,type:text"`
CreatedAt time.Time `bun:"created_at,notnull,default:current_timestamp"`
RemovedAt *time.Time `bun:"removed_at,type:timestamp"`
}
type newCloudIntegration struct {
bun.BaseModel `bun:"table:cloud_integration"`
types.Identifiable
types.TimeAuditable
Provider string `json:"provider" bun:"provider,type:text"`
Config string `json:"config" bun:"config,type:text"`
AccountID string `json:"account_id" bun:"account_id,type:text"`
LastAgentReport string `json:"last_agent_report" bun:"last_agent_report,type:text"`
RemovedAt *time.Time `json:"removed_at" bun:"removed_at,type:timestamp"`
OrgID string `json:"org_id" bun:"org_id,type:text"`
}
type existingCloudIntegrationService struct {
bun.BaseModel `bun:"table:cloud_integrations_service_configs,alias:c1"`
CloudProvider string `bun:"cloud_provider,type:text,notnull,unique:service_cloud_provider_account"`
CloudAccountID string `bun:"cloud_account_id,type:text,notnull,unique:service_cloud_provider_account"`
ServiceID string `bun:"service_id,type:text,notnull,unique:service_cloud_provider_account"`
ConfigJSON string `bun:"config_json,type:text"`
CreatedAt time.Time `bun:"created_at,default:current_timestamp"`
}
type newCloudIntegrationService struct {
bun.BaseModel `bun:"table:cloud_integration_service,alias:cis"`
types.Identifiable
types.TimeAuditable
Type string `bun:"type,type:text,notnull,unique:cloud_integration_id_type"`
Config string `bun:"config,type:text"`
CloudIntegrationID string `bun:"cloud_integration_id,type:text,notnull,unique:cloud_integration_id_type"`
}
type StorablePersonalAccessToken struct {
bun.BaseModel `bun:"table:personal_access_token"`
types.Identifiable
types.TimeAuditable
OrgID string `json:"orgId" bun:"org_id,type:text,notnull"`
Role string `json:"role" bun:"role,type:text,notnull,default:'ADMIN'"`
UserID string `json:"userId" bun:"user_id,type:text,notnull"`
Token string `json:"token" bun:"token,type:text,notnull,unique"`
Name string `json:"name" bun:"name,type:text,notnull"`
ExpiresAt int64 `json:"expiresAt" bun:"expires_at,notnull,default:0"`
LastUsed int64 `json:"lastUsed" bun:"last_used,notnull,default:0"`
Revoked bool `json:"revoked" bun:"revoked,notnull,default:false"`
UpdatedByUserID string `json:"updatedByUserId" bun:"updated_by_user_id,type:text,notnull,default:''"`
}
func (migration *updateIntegrations) Up(ctx context.Context, db *bun.DB) error {
// begin transaction
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer tx.Rollback()
// don't run the migration if there are multiple org ids
orgIDs := make([]string, 0)
err = migration.store.BunDB().NewSelect().Model((*types.Organization)(nil)).Column("id").Scan(ctx, &orgIDs)
if err != nil {
return err
}
if len(orgIDs) > 1 {
return nil
}
// ---
// installed integrations
// ---
err = migration.
store.
Dialect().
RenameTableAndModifyModel(ctx, tx, new(existingInstalledIntegration), new(newInstalledIntegration), []string{OrgReference}, func(ctx context.Context) error {
existingIntegrations := make([]*existingInstalledIntegration, 0)
err = tx.
NewSelect().
Model(&existingIntegrations).
Scan(ctx)
if err != nil {
if err != sql.ErrNoRows {
return err
}
}
if err == nil && len(existingIntegrations) > 0 {
newIntegrations := migration.
CopyOldIntegrationsToNewIntegrations(tx, orgIDs[0], existingIntegrations)
_, err = tx.
NewInsert().
Model(&newIntegrations).
Exec(ctx)
if err != nil {
return err
}
}
return nil
})
if err != nil {
return err
}
// ---
// cloud integrations
// ---
err = migration.
store.
Dialect().
RenameTableAndModifyModel(ctx, tx, new(existingCloudIntegration), new(newCloudIntegration), []string{OrgReference}, func(ctx context.Context) error {
existingIntegrations := make([]*existingCloudIntegration, 0)
err = tx.
NewSelect().
Model(&existingIntegrations).
Where("removed_at IS NULL"). // we will only copy the accounts that are not removed
Scan(ctx)
if err != nil {
if err != sql.ErrNoRows {
return err
}
}
if err == nil && len(existingIntegrations) > 0 {
newIntegrations := migration.
CopyOldCloudIntegrationsToNewCloudIntegrations(tx, orgIDs[0], existingIntegrations)
_, err = tx.
NewInsert().
Model(&newIntegrations).
Exec(ctx)
if err != nil {
return err
}
}
return nil
})
if err != nil {
return err
}
// add unique constraint to cloud_integration table
_, err = tx.ExecContext(ctx, `CREATE UNIQUE INDEX IF NOT EXISTS unique_cloud_integration ON cloud_integration (id, provider, org_id)`)
if err != nil {
return err
}
// ---
// cloud integration service
// ---
err = migration.
store.
Dialect().
RenameTableAndModifyModel(ctx, tx, new(existingCloudIntegrationService), new(newCloudIntegrationService), []string{CloudIntegrationReference}, func(ctx context.Context) error {
existingServices := make([]*existingCloudIntegrationService, 0)
// only one service per provider,account id and type
// so there won't be any duplicates.
// just that these will be enabled as soon as the integration for the account is enabled
err = tx.
NewSelect().
Model(&existingServices).
Scan(ctx)
if err != nil {
if err != sql.ErrNoRows {
return err
}
}
if err == nil && len(existingServices) > 0 {
newServices := migration.
CopyOldCloudIntegrationServicesToNewCloudIntegrationServices(tx, orgIDs[0], existingServices)
if len(newServices) > 0 {
_, err = tx.
NewInsert().
Model(&newServices).
Exec(ctx)
if err != nil {
return err
}
}
}
return nil
})
if err != nil {
return err
}
if len(orgIDs) == 0 {
err = tx.Commit()
if err != nil {
return err
}
return nil
}
// copy the old aws integration user to the new user
err = migration.copyOldAwsIntegrationUser(tx, orgIDs[0])
if err != nil {
return err
}
err = tx.Commit()
if err != nil {
return err
}
return nil
}
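Every step of the Up migration above runs inside a single transaction: begin, defer a rollback, and commit only once all table rewrites have succeeded. A minimal sketch of that pattern with bun (the package name, function name, and index are illustrative, not part of this changeset):

package sqlmigration

import (
	"context"

	"github.com/uptrace/bun"
)

// exampleUp shows the begin / defer-rollback / commit shape used by
// updateIntegrations.Up. Rollback after a successful Commit is a no-op,
// so deferring it is safe.
func exampleUp(ctx context.Context, db *bun.DB) error {
	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	defer tx.Rollback()

	// all schema work happens inside the transaction
	if _, err := tx.ExecContext(ctx, `CREATE UNIQUE INDEX IF NOT EXISTS example_idx ON example (id)`); err != nil {
		return err
	}

	return tx.Commit()
}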
func (migration *updateIntegrations) Down(ctx context.Context, db *bun.DB) error {
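// no-op: this restructuring is not rolled back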
return nil
}
func (migration *updateIntegrations) CopyOldIntegrationsToNewIntegrations(tx bun.IDB, orgID string, existingIntegrations []*existingInstalledIntegration) []*newInstalledIntegration {
newIntegrations := make([]*newInstalledIntegration, 0)
for _, integration := range existingIntegrations {
newIntegrations = append(newIntegrations, &newInstalledIntegration{
Identifiable: types.Identifiable{
ID: valuer.GenerateUUID(),
},
Type: integration.IntegrationID,
Config: integration.ConfigJSON,
InstalledAt: integration.InstalledAt,
OrgID: orgID,
})
}
return newIntegrations
}
func (migration *updateIntegrations) CopyOldCloudIntegrationsToNewCloudIntegrations(tx bun.IDB, orgID string, existingIntegrations []*existingCloudIntegration) []*newCloudIntegration {
newIntegrations := make([]*newCloudIntegration, 0)
for _, integration := range existingIntegrations {
newIntegrations = append(newIntegrations, &newCloudIntegration{
Identifiable: types.Identifiable{
ID: valuer.GenerateUUID(),
},
TimeAuditable: types.TimeAuditable{
CreatedAt: integration.CreatedAt,
UpdatedAt: integration.CreatedAt,
},
Provider: integration.CloudProvider,
AccountID: integration.CloudAccountID,
Config: integration.ConfigJSON,
LastAgentReport: integration.LastAgentReportJSON,
RemovedAt: integration.RemovedAt,
OrgID: orgID,
})
}
return newIntegrations
}
func (migration *updateIntegrations) CopyOldCloudIntegrationServicesToNewCloudIntegrationServices(tx bun.IDB, orgID string, existingServices []*existingCloudIntegrationService) []*newCloudIntegrationService {
newServices := make([]*newCloudIntegrationService, 0)
for _, service := range existingServices {
var cloudIntegrationID string
err := tx.NewSelect().
Model((*newCloudIntegration)(nil)).
Column("id").
Where("account_id = ?", service.CloudAccountID).
Where("provider = ?", service.CloudProvider).
Where("org_id = ?", orgID).
Scan(context.Background(), &cloudIntegrationID)
if err != nil {
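// no cloud_integration row was copied for this account (it was likely removed),
// so skip its services as well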
if err == sql.ErrNoRows {
continue
}
zap.L().Error("failed to get cloud integration id", zap.Error(err))
return nil
}
newServices = append(newServices, &newCloudIntegrationService{
Identifiable: types.Identifiable{
ID: valuer.GenerateUUID(),
},
TimeAuditable: types.TimeAuditable{
CreatedAt: service.CreatedAt,
UpdatedAt: service.CreatedAt,
},
Type: service.ServiceID,
Config: service.ConfigJSON,
CloudIntegrationID: cloudIntegrationID,
})
}
return newServices
}
func (migration *updateIntegrations) copyOldAwsIntegrationUser(tx bun.IDB, orgID string) error {
user := &types.User{}
err := tx.NewSelect().Model(user).Where("email = ?", "aws-integration@signoz.io").Scan(context.Background())
if err != nil {
if err == sql.ErrNoRows {
return nil
}
return err
}
// if the id is already a UUID, this user has already been migrated; nothing to do
if _, err := uuid.Parse(user.ID); err == nil {
return nil
}
// new user
newUser := &types.User{
ID: uuid.New().String(),
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
OrgID: orgID,
Name: user.Name,
Email: user.Email,
GroupID: user.GroupID,
Password: user.Password,
}
// fetch the unrevoked PAT for the old user
pat := &StorablePersonalAccessToken{}
err = tx.NewSelect().Model(pat).Where("user_id = ? and revoked = false", "aws-integration").Scan(context.Background())
if err != nil {
if err == sql.ErrNoRows {
// no active PAT: drop the old user row and finish
_, err = tx.ExecContext(context.Background(), `DELETE FROM users WHERE id = ?`, user.ID)
if err != nil {
return err
}
return nil
}
return err
}
// new PAT tied to the new user
newPAT := &StorablePersonalAccessToken{
Identifiable: types.Identifiable{ID: valuer.GenerateUUID()},
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
OrgID: orgID,
UserID: newUser.ID,
Token: pat.Token,
Name: pat.Name,
ExpiresAt: pat.ExpiresAt,
LastUsed: pat.LastUsed,
Revoked: pat.Revoked,
Role: pat.Role,
}
// delete old user
_, err = tx.ExecContext(context.Background(), `DELETE FROM users WHERE id = ?`, user.ID)
if err != nil {
return err
}
// insert the new user
_, err = tx.NewInsert().Model(newUser).Exec(context.Background())
if err != nil {
return err
}
// insert the new pat
_, err = tx.NewInsert().Model(newPAT).Exec(context.Background())
if err != nil {
return err
}
return nil
}
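copyOldAwsIntegrationUser keys its idempotence check on whether the stored user id already parses as a UUID. A small sketch of that check (the helper below is illustrative and not part of this changeset; the legacy id value "aws-integration" is inferred from the PAT lookup above):

package sqlmigration

import "github.com/google/uuid"

// needsIDMigration reports whether a stored user id still uses the legacy
// string form rather than a UUID.
func needsIDMigration(id string) bool {
	_, err := uuid.Parse(id)
	return err != nil // not a valid UUID => legacy id, migrate it
}

// needsIDMigration("aws-integration")   == true
// needsIDMigration(uuid.New().String()) == false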

View File

@@ -26,9 +26,8 @@ var (
)
var (
OrgReference = "org"
UserReference = "user"
CloudIntegrationReference = "cloud_integration"
OrgReference = "org"
UserReference = "user"
)
func New(

View File

@@ -17,15 +17,13 @@ var (
)
var (
Org = "org"
User = "user"
CloudIntegration = "cloud_integration"
Org = "org"
User = "user"
)
var (
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
)
type dialect struct {
@@ -204,8 +202,6 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
fkReferences = append(fkReferences, OrgReference)
} else if reference == User && !slices.Contains(fkReferences, UserReference) {
fkReferences = append(fkReferences, UserReference)
} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
fkReferences = append(fkReferences, CloudIntegrationReference)
}
}

View File

@@ -3,13 +3,10 @@ package telemetrylogs
import (
"context"
"fmt"
"strconv"
"strings"
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/huandu/go-sqlbuilder"
)
@@ -84,13 +81,9 @@ func (c *conditionBuilder) GetColumn(ctx context.Context, key *telemetrytypes.Te
case telemetrytypes.FieldDataTypeBool:
return logsV2Columns["attributes_bool"], nil
}
case telemetrytypes.FieldContextLog, telemetrytypes.FieldContextUnspecified:
case telemetrytypes.FieldContextLog:
col, ok := logsV2Columns[key.Name]
if !ok {
// check if the key has body JSON search
if strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
return logsV2Columns["body"], nil
}
return nil, qbtypes.ErrColumnNotFound
}
return col, nil
@@ -144,83 +137,6 @@ func (c *conditionBuilder) GetTableFieldName(ctx context.Context, key *telemetry
return column.Name, nil
}
func parseStrValue(valueStr string, operator qbtypes.FilterOperator) (telemetrytypes.FieldDataType, any) {
valueType := telemetrytypes.FieldDataTypeString
// return the value as-is for the following operators,
// since they always operate on string values
if operator == qbtypes.FilterOperatorContains || operator == qbtypes.FilterOperatorNotContains ||
operator == qbtypes.FilterOperatorRegexp || operator == qbtypes.FilterOperatorNotRegexp ||
operator == qbtypes.FilterOperatorLike || operator == qbtypes.FilterOperatorNotLike ||
operator == qbtypes.FilterOperatorILike || operator == qbtypes.FilterOperatorNotILike {
return valueType, valueStr
}
var err error
var parsedValue any
if parsedValue, err = strconv.ParseBool(valueStr); err == nil {
valueType = telemetrytypes.FieldDataTypeBool
} else if parsedValue, err = strconv.ParseInt(valueStr, 10, 64); err == nil {
valueType = telemetrytypes.FieldDataTypeInt64
} else if parsedValue, err = strconv.ParseFloat(valueStr, 64); err == nil {
valueType = telemetrytypes.FieldDataTypeFloat64
} else {
parsedValue = valueStr
valueType = telemetrytypes.FieldDataTypeString
}
return valueType, parsedValue
}
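// For reference, the removed parseStrValue tried parsers in a fixed order
// (bool, then int64, then float64, falling back to string); illustrative
// expectations, not part of this diff:
//   parseStrValue("true", FilterOperatorEqual)   -> FieldDataTypeBool,    true
//   parseStrValue("42", FilterOperatorEqual)     -> FieldDataTypeInt64,   int64(42)
//   parseStrValue("4.2", FilterOperatorEqual)    -> FieldDataTypeFloat64, 4.2
//   parseStrValue("foo", FilterOperatorContains) -> FieldDataTypeString,  "foo" (contains/like/regex stay string)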
func inferDataType(value any, operator qbtypes.FilterOperator, key *telemetrytypes.TelemetryFieldKey) (telemetrytypes.FieldDataType, any) {
// check whether the value is an int, float, string, or bool
valueType := telemetrytypes.FieldDataTypeUnspecified
switch v := value.(type) {
case []any:
// take the first element and infer the type
if len(v) > 0 {
valueType, _ = inferDataType(v[0], operator, key)
}
return valueType, v
case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64:
valueType = telemetrytypes.FieldDataTypeInt64
case float32, float64:
valueType = telemetrytypes.FieldDataTypeFloat64
case string:
valueType, value = parseStrValue(v, operator)
case bool:
valueType = telemetrytypes.FieldDataTypeBool
}
// keys ending in [*] refer to arrays, so wrap the inferred type
if strings.HasSuffix(key.Name, "[*]") {
valueType = telemetrytypes.FieldDataType{String: valuer.NewString(fmt.Sprintf("[]%s", valueType.StringValue()))}
}
return valueType, value
}
func GetBodyJSONKey(_ context.Context, key *telemetrytypes.TelemetryFieldKey, operator qbtypes.FilterOperator, value any) (string, any) {
dataType, value := inferDataType(value, operator, key)
// all body JSON keys are of the form body.<path>; drop the leading "body" segment
path := strings.Join(strings.Split(key.Name, ".")[1:], ".")
// for array types, we need to extract the value from the JSON_QUERY
if dataType == telemetrytypes.FieldDataTypeArrayInt64 ||
dataType == telemetrytypes.FieldDataTypeArrayFloat64 ||
dataType == telemetrytypes.FieldDataTypeArrayString ||
dataType == telemetrytypes.FieldDataTypeArrayBool ||
dataType == telemetrytypes.FieldDataTypeArrayNumber {
return fmt.Sprintf("JSONExtract(JSON_QUERY(body, '$.%s'), '%s')", path, dataType.CHDataType()), value
}
// for all other types, we need to extract the value from the JSON_VALUE
return fmt.Sprintf("JSONExtract(JSON_VALUE(body, '$.%s'), '%s')", path, dataType.CHDataType()), value
}
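// Illustrative output of the removed GetBodyJSONKey (assuming CHDataType()
// renders ClickHouse type names such as 'Int64' and 'Array(String)'; not part
// of this diff):
//   key "body.requestor.id", value int64(42)
//     -> "JSONExtract(JSON_VALUE(body, '$.requestor.id'), 'Int64')", 42
//   key "body.tags[*]", string-array value
//     -> "JSONExtract(JSON_QUERY(body, '$.tags[*]'), 'Array(String)')", value unchanged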
func (c *conditionBuilder) GetCondition(
ctx context.Context,
key *telemetrytypes.TelemetryFieldKey,
@@ -238,10 +154,6 @@ func (c *conditionBuilder) GetCondition(
return "", err
}
if strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
tblFieldName, value = GetBodyJSONKey(ctx, key, operator, value)
}
tblFieldName, value = telemetrytypes.DataTypeCollisionHandledFieldName(key, value, tblFieldName)
// regular operators

Some files were not shown because too many files have changed in this diff Show More