Mirror of https://github.com/SigNoz/signoz.git (synced 2026-03-02 12:02:09 +00:00)

Compare commits: 19 commits (tvats-expo ... refactor/c)
| Author | SHA1 | Date |
|---|---|---|
| | a09dc325de | |
| | 37cd1ab84b | |
| | 379b4f7fc4 | |
| | 5e536ae077 | |
| | 234585e642 | |
| | 2cc14f1ad4 | |
| | dc4ed4d239 | |
| | 8184e60c03 | |
| | 11c793deb0 | |
| | 7281c36873 | |
| | 9557d20a48 | |
| | ad22137aa3 | |
| | 95fc905448 | |
| | 40288776e8 | |
| | 9f5afaf36f | |
| | 3f69d5cdc2 | |
| | 98617b5120 | |
| | df30852296 | |
| | b9beabb425 | |
56  .github/workflows/jsci.yaml  (vendored)
@@ -61,3 +61,59 @@ jobs:
        with:
          PRIMUS_REF: main
          JS_SRC: frontend
  md-languages:
    if: |
      (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
      (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
    runs-on: ubuntu-latest
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: validate md languages
        run: bash frontend/scripts/validate-md-languages.sh
  authz:
    if: |
      (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
      (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v5

      - name: Set up Node.js
        uses: actions/setup-node@v5
        with:
          node-version: "22"

      - name: Install frontend dependencies
        working-directory: ./frontend
        run: |
          yarn install

      - name: Install uv
        uses: astral-sh/setup-uv@v5

      - name: Install Python dependencies
        working-directory: ./tests/integration
        run: |
          uv sync

      - name: Start test environment
        run: |
          make py-test-setup

      - name: Generate permissions.type.ts
        run: |
          node frontend/scripts/generate-permissions-type.js

      - name: Teardown test environment
        if: always()
        run: |
          make py-test-teardown

      - name: Check for changes
        run: |
          if ! git diff --exit-code frontend/src/hooks/useAuthZ/permissions.type.ts; then
            echo "::error::frontend/src/hooks/useAuthZ/permissions.type.ts is out of date. Please run the generator locally and commit the changes: npm run generate:permissions-type (from the frontend directory)"
            exit 1
          fi
4  .vscode/settings.json  (vendored)

@@ -17,7 +17,5 @@
    },
    "[html]": {
        "editor.defaultFormatter": "vscode.html-language-features"
    },
    "python-envs.defaultEnvManager": "ms-python.python:system",
    "python-envs.pythonProjects": []
}
}
@@ -1763,6 +1763,134 @@ components:
|
||||
- type
|
||||
- orgId
|
||||
type: object
|
||||
ServiceaccounttypesFactorAPIKey:
|
||||
properties:
|
||||
createdAt:
|
||||
format: date-time
|
||||
type: string
|
||||
expires_at:
|
||||
minimum: 0
|
||||
type: integer
|
||||
id:
|
||||
type: string
|
||||
key:
|
||||
type: string
|
||||
last_used:
|
||||
format: date-time
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
service_account_id:
|
||||
type: string
|
||||
updatedAt:
|
||||
format: date-time
|
||||
type: string
|
||||
required:
|
||||
- id
|
||||
- key
|
||||
- expires_at
|
||||
- last_used
|
||||
- service_account_id
|
||||
type: object
|
||||
ServiceaccounttypesGettableFactorAPIKeyWithKey:
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
key:
|
||||
type: string
|
||||
required:
|
||||
- id
|
||||
- key
|
||||
type: object
|
||||
ServiceaccounttypesPostableFactorAPIKey:
|
||||
properties:
|
||||
expires_at:
|
||||
minimum: 0
|
||||
type: integer
|
||||
name:
|
||||
type: string
|
||||
required:
|
||||
- name
|
||||
- expires_at
|
||||
type: object
|
||||
ServiceaccounttypesPostableServiceAccount:
|
||||
properties:
|
||||
email:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
roles:
|
||||
items:
|
||||
type: string
|
||||
type: array
|
||||
required:
|
||||
- name
|
||||
- email
|
||||
- roles
|
||||
type: object
|
||||
ServiceaccounttypesServiceAccount:
|
||||
properties:
|
||||
createdAt:
|
||||
format: date-time
|
||||
type: string
|
||||
email:
|
||||
type: string
|
||||
id:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
orgID:
|
||||
type: string
|
||||
roles:
|
||||
items:
|
||||
type: string
|
||||
type: array
|
||||
status:
|
||||
type: string
|
||||
updatedAt:
|
||||
format: date-time
|
||||
type: string
|
||||
required:
|
||||
- id
|
||||
- name
|
||||
- email
|
||||
- roles
|
||||
- status
|
||||
- orgID
|
||||
type: object
|
||||
ServiceaccounttypesUpdatableFactorAPIKey:
|
||||
properties:
|
||||
expires_at:
|
||||
minimum: 0
|
||||
type: integer
|
||||
name:
|
||||
type: string
|
||||
required:
|
||||
- name
|
||||
- expires_at
|
||||
type: object
|
||||
ServiceaccounttypesUpdatableServiceAccount:
|
||||
properties:
|
||||
email:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
roles:
|
||||
items:
|
||||
type: string
|
||||
type: array
|
||||
required:
|
||||
- name
|
||||
- email
|
||||
- roles
|
||||
type: object
|
||||
ServiceaccounttypesUpdatableServiceAccountStatus:
|
||||
properties:
|
||||
status:
|
||||
type: string
|
||||
required:
|
||||
- status
|
||||
type: object
|
||||
TelemetrytypesFieldContext:
|
||||
enum:
|
||||
- metric
|
||||
@@ -2858,186 +2986,6 @@ paths:
|
||||
summary: Update auth domain
|
||||
tags:
|
||||
- authdomains
|
||||
/api/v1/export_raw_data:
|
||||
get:
|
||||
deprecated: false
|
||||
description: This endpoints allows simple query exporting raw data for traces
|
||||
and logs
|
||||
operationId: HandleExportRawDataGET
|
||||
parameters:
|
||||
- in: query
|
||||
name: format
|
||||
schema:
|
||||
default: csv
|
||||
enum:
|
||||
- csv
|
||||
- jsonl
|
||||
type: string
|
||||
- in: query
|
||||
name: signal
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/components/schemas/TelemetrytypesSignal'
|
||||
- deprecated: true
|
||||
in: query
|
||||
name: source
|
||||
schema:
|
||||
deprecated: true
|
||||
type: string
|
||||
- in: query
|
||||
name: start
|
||||
schema:
|
||||
minimum: 0
|
||||
type: integer
|
||||
- in: query
|
||||
name: end
|
||||
schema:
|
||||
minimum: 0
|
||||
type: integer
|
||||
- in: query
|
||||
name: limit
|
||||
schema:
|
||||
default: 10000
|
||||
maximum: 50000
|
||||
minimum: 1
|
||||
type: integer
|
||||
- deprecated: true
|
||||
in: query
|
||||
name: filter
|
||||
schema:
|
||||
deprecated: true
|
||||
type: string
|
||||
- content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Querybuildertypesv5Filter'
|
||||
in: query
|
||||
name: filterExpression
|
||||
- deprecated: true
|
||||
in: query
|
||||
name: columns
|
||||
schema:
|
||||
deprecated: true
|
||||
items:
|
||||
type: string
|
||||
type: array
|
||||
- in: query
|
||||
name: selectFields
|
||||
schema:
|
||||
items:
|
||||
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
|
||||
type: array
|
||||
- deprecated: true
|
||||
in: query
|
||||
name: order_by
|
||||
schema:
|
||||
deprecated: true
|
||||
type: string
|
||||
- in: query
|
||||
name: order
|
||||
schema:
|
||||
items:
|
||||
$ref: '#/components/schemas/Querybuildertypesv5OrderBy'
|
||||
type: array
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
description: OK
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- VIEWER
|
||||
- tokenizer:
|
||||
- VIEWER
|
||||
summary: Export raw data
|
||||
tags:
|
||||
- logs
|
||||
- traces
|
||||
post:
|
||||
deprecated: false
|
||||
description: This endpoints allows complex query exporting raw data for traces
|
||||
and logs
|
||||
operationId: HandleExportRawDataPOST
|
||||
parameters:
|
||||
- in: query
|
||||
name: format
|
||||
schema:
|
||||
default: csv
|
||||
enum:
|
||||
- csv
|
||||
- jsonl
|
||||
type: string
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
description: OK
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- VIEWER
|
||||
- tokenizer:
|
||||
- VIEWER
|
||||
summary: Export raw data
|
||||
tags:
|
||||
- logs
|
||||
- traces
|
||||
/api/v1/fields/keys:
|
||||
get:
|
||||
deprecated: false
|
||||
@@ -4717,6 +4665,586 @@ paths:
|
||||
summary: Patch objects for a role by relation
|
||||
tags:
|
||||
- role
|
||||
/api/v1/service_accounts:
|
||||
get:
|
||||
deprecated: false
|
||||
description: This endpoint lists the service accounts for an organisation
|
||||
operationId: ListServiceAccounts
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
items:
|
||||
$ref: '#/components/schemas/ServiceaccounttypesServiceAccount'
|
||||
type: array
|
||||
status:
|
||||
type: string
|
||||
required:
|
||||
- status
|
||||
- data
|
||||
type: object
|
||||
description: OK
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: List service accounts
|
||||
tags:
|
||||
- serviceaccount
|
||||
post:
|
||||
deprecated: false
|
||||
description: This endpoint creates a service account
|
||||
operationId: CreateServiceAccount
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ServiceaccounttypesPostableServiceAccount'
|
||||
responses:
|
||||
"201":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
$ref: '#/components/schemas/TypesIdentifiable'
|
||||
status:
|
||||
type: string
|
||||
required:
|
||||
- status
|
||||
- data
|
||||
type: object
|
||||
description: Created
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"409":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Conflict
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Create service account
|
||||
tags:
|
||||
- serviceaccount
|
||||
/api/v1/service_accounts/{id}:
|
||||
delete:
|
||||
deprecated: false
|
||||
description: This endpoint deletes an existing service account
|
||||
operationId: DeleteServiceAccount
|
||||
parameters:
|
||||
- in: path
|
||||
name: id
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
"204":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
description: No Content
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Deletes a service account
|
||||
tags:
|
||||
- serviceaccount
|
||||
get:
|
||||
deprecated: false
|
||||
description: This endpoint gets an existing service account
|
||||
operationId: GetServiceAccount
|
||||
parameters:
|
||||
- in: path
|
||||
name: id
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
$ref: '#/components/schemas/ServiceaccounttypesServiceAccount'
|
||||
status:
|
||||
type: string
|
||||
required:
|
||||
- status
|
||||
- data
|
||||
type: object
|
||||
description: OK
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Gets a service account
|
||||
tags:
|
||||
- serviceaccount
|
||||
put:
|
||||
deprecated: false
|
||||
description: This endpoint updates an existing service account
|
||||
operationId: UpdateServiceAccount
|
||||
parameters:
|
||||
- in: path
|
||||
name: id
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ServiceaccounttypesUpdatableServiceAccount'
|
||||
responses:
|
||||
"204":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
description: No Content
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Updates a service account
|
||||
tags:
|
||||
- serviceaccount
|
||||
/api/v1/service_accounts/{id}/keys:
|
||||
get:
|
||||
deprecated: false
|
||||
description: This endpoint lists the service account keys
|
||||
operationId: ListServiceAccountKeys
|
||||
parameters:
|
||||
- in: path
|
||||
name: id
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
items:
|
||||
$ref: '#/components/schemas/ServiceaccounttypesFactorAPIKey'
|
||||
type: array
|
||||
status:
|
||||
type: string
|
||||
required:
|
||||
- status
|
||||
- data
|
||||
type: object
|
||||
description: OK
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: List service account keys
|
||||
tags:
|
||||
- serviceaccount
|
||||
post:
|
||||
deprecated: false
|
||||
description: This endpoint creates a service account key
|
||||
operationId: CreateServiceAccountKey
|
||||
parameters:
|
||||
- in: path
|
||||
name: id
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ServiceaccounttypesPostableFactorAPIKey'
|
||||
responses:
|
||||
"201":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
$ref: '#/components/schemas/ServiceaccounttypesGettableFactorAPIKeyWithKey'
|
||||
status:
|
||||
type: string
|
||||
required:
|
||||
- status
|
||||
- data
|
||||
type: object
|
||||
description: Created
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"409":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Conflict
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Create a service account key
|
||||
tags:
|
||||
- serviceaccount
|
||||
/api/v1/service_accounts/{id}/keys/{fid}:
|
||||
delete:
|
||||
deprecated: false
|
||||
description: This endpoint revokes an existing service account key
|
||||
operationId: RevokeServiceAccountKey
|
||||
parameters:
|
||||
- in: path
|
||||
name: id
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- in: path
|
||||
name: fid
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
"204":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
description: No Content
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Revoke a service account key
|
||||
tags:
|
||||
- serviceaccount
|
||||
put:
|
||||
deprecated: false
|
||||
description: This endpoint updates an existing service account key
|
||||
operationId: UpdateServiceAccountKey
|
||||
parameters:
|
||||
- in: path
|
||||
name: id
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- in: path
|
||||
name: fid
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ServiceaccounttypesUpdatableFactorAPIKey'
|
||||
responses:
|
||||
"204":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
description: No Content
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Updates a service account key
|
||||
tags:
|
||||
- serviceaccount
|
||||
/api/v1/service_accounts/{id}/status:
|
||||
put:
|
||||
deprecated: false
|
||||
description: This endpoint updates an existing service account status
|
||||
operationId: UpdateServiceAccountStatus
|
||||
parameters:
|
||||
- in: path
|
||||
name: id
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ServiceaccounttypesUpdatableServiceAccountStatus'
|
||||
responses:
|
||||
"204":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
description: No Content
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Updates a service account status
|
||||
tags:
|
||||
- serviceaccount
|
||||
/api/v1/user:
|
||||
get:
|
||||
deprecated: false
|
||||
|
||||
127  docs/contributing/go/abstractions.md  (Normal file)
@@ -0,0 +1,127 @@

# Abstractions

This document provides rules for deciding when a new type, interface, or intermediate representation is warranted in Go code. The goal is to keep the codebase navigable by ensuring every abstraction earns its place.

## The cost of a new abstraction

Every exported type, interface, or wrapper is a permanent commitment. It must be named, documented, tested, and understood by every future contributor. It creates a new concept in the codebase vocabulary. Before introducing one, verify that the cost is justified by a concrete benefit that cannot be achieved with existing mechanisms.

## Before you introduce anything new

Answer these four questions. If writing a PR, include the answers in the description.

1. **What already exists?** Name the specific type, function, interface, or library that covers this ground today.
2. **What does the new abstraction add?** Name the concrete operation, guarantee, or capability. "Cleaner" or "more reusable" are not sufficient; name what the caller can do that it could not do before.
3. **What does the new abstraction drop?** If it wraps or mirrors an existing structure, list what it cannot represent. Every gap must be either justified or handled with an explicit error.
4. **Who consumes it?** List the call sites. If there is only one producer and one consumer in the same call chain, you likely need a function, not a type.

## Rules

### 1. Prefer functions over types

If a piece of logic has one input and one output, write a function. Do not create a struct to hold intermediate state that is built in one place and read in one place. A function is easier to test, easier to inline, and does not expand the vocabulary of the codebase.

```go
// Prefer this:
func ConvertConfig(src ExternalConfig) (InternalConfig, error)

// Over this:
type ConfigAdapter struct { ... }
func NewConfigAdapter(src ExternalConfig) *ConfigAdapter
func (a *ConfigAdapter) ToInternal() (InternalConfig, error)
```

The two-step version is only justified when `ConfigAdapter` has multiple distinct consumers that use it in different ways.

### 2. Do not duplicate structures you do not own

When a library or external package produces a structured output, operate on that output directly. Do not create a parallel type that mirrors a subset of its fields.

A partial copy will:
- **Silently lose data** when the source has fields or variants the copy does not account for.
- **Drift** when the source evolves and the copy is not updated in lockstep.
- **Add a conversion step** that doubles the code surface and the opportunity for bugs.

If you need to shield consumers from a dependency, define a narrow interface over the dependency's type rather than copying its shape into a new struct.
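
As a sketch of that approach (the `Invoice` type, `InvoiceFetcher`, and `TotalDue` below are invented for illustration; they are not types from this repository):

```go
// Illustrative only: Invoice stands in for a structured type owned by an
// external library; in real code it would come from that library's package.
type Invoice struct {
    ID        string
    AmountDue int64
    // ... many more fields owned by the dependency ...
}

// InvoiceFetcher is the narrow interface consumers depend on. It returns the
// dependency's own type, so nothing is copied and nothing is silently dropped.
type InvoiceFetcher interface {
    FetchInvoice(ctx context.Context, id string) (*Invoice, error)
}

// TotalDue reads the dependency's structure directly; there is no parallel
// local struct and no conversion step to drift out of date.
func TotalDue(ctx context.Context, f InvoiceFetcher, id string) (int64, error) {
    inv, err := f.FetchInvoice(ctx, id)
    if err != nil {
        return 0, err
    }
    return inv.AmountDue, nil
}
```

Consumers stay insulated from the dependency through the interface, while the data itself keeps its original shape.
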
### 3. Never silently discard input

If your code receives structured input and cannot handle part of it, return an error. Do not silently return nil, skip the element, or produce a partial result. Silent data loss is the hardest class of bug to detect because the code appears to work; it just produces wrong results.

```go
// Wrong: silently ignores the unrecognized case.
default:
    return nil

// Right: makes the gap visible.
default:
    return nil, fmt.Errorf("unsupported %T value: %v", v, v)
```

This applies broadly: type switches, format conversions, data migrations, enum mappings, configuration parsing. Anywhere a `default` or `else` branch can swallow input, it should surface an error instead.

### 4. Do not expose methods that lose information

A method on a structured type should not strip meaning from the structure it belongs to. If a caller needs to iterate over elements for a specific purpose (validation, aggregation, logging), write that logic as a standalone function that operates on the structure with full context, rather than adding a method that returns a reduced view.

```go
// Problematic: callers cannot distinguish how items were related.
func (o *Order) AllLineItems() []LineItem { ... }

// Better: the validation logic operates on the full structure.
func ValidateOrder(o *Order) error { ... }
```

Public methods shape how a type is used. Once a lossy accessor exists, callers will depend on it, and the lost information becomes unrecoverable at those call sites.

### 5. Interfaces should be discovered, not predicted

Do not define an interface before you have at least two concrete implementations that need it. An interface with one implementation is not abstraction; it is indirection that makes it harder to navigate from call site to implementation.

The exception is interfaces required for testing (e.g., for mocking an external dependency). In that case, define the interface in the **consuming** package, not the providing package, following the Go convention of [accepting interfaces and returning structs](https://go.dev/wiki/CodeReviewComments#interfaces).
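
A minimal sketch of that convention (the `Sender` and `Notifier` names are invented for illustration; the point is where the interface lives, not the specific types):

```go
// Package notify is the consumer: it declares the one-method interface it
// needs, and tests can substitute a fake. The real SMTP client lives in
// another package and satisfies this interface implicitly.
type Sender interface {
    Send(ctx context.Context, to, subject, body string) error
}

type Notifier struct {
    sender Sender
}

func NewNotifier(sender Sender) *Notifier {
    return &Notifier{sender: sender}
}

func (n *Notifier) Welcome(ctx context.Context, email string) error {
    return n.sender.Send(ctx, email, "Welcome", "Thanks for signing up.")
}
```
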
### 6. Wrappers must add semantics, not just rename

A wrapper type is justified when it adds meaning, validation, or invariants that the underlying type does not carry. It is not justified when it merely renames fields or reorganizes the same data into a different shape.

```go
// Justified: adds validation that the underlying string does not carry.
type OrgID struct{ value string }
func NewOrgID(s string) (OrgID, error) { /* validates format */ }

// Not justified: renames fields with no new invariant or behavior.
type UserInfo struct {
    Name  string // same as source.Name
    Email string // same as source.Email
}
```

Ask: what does the wrapper guarantee that the underlying type does not? If the answer is nothing, use the underlying type directly.

## When a new type IS warranted

A new type earns its place when it meets **at least one** of these criteria:

- **Serialization boundary**: It must be persisted, sent over the wire, or written to config. The source type is unsuitable (unexported fields, function pointers, cycles).
- **Invariant enforcement**: The constructor or methods enforce constraints that raw data does not carry (e.g., non-empty, validated format, bounded range).
- **Multiple distinct consumers**: Three or more call sites use the type in meaningfully different ways. The type is the shared vocabulary between them.
- **Dependency firewall**: The type lives in a lightweight package so that consumers avoid importing a heavy dependency.

## What should I remember?

- A function is almost always simpler than a type. Start with a function; promote to a type only when you have evidence of need.
- Never silently drop data. If you cannot handle it, error.
- If your new type mirrors an existing one, you need a strong reason beyond "nicer to work with".
- If your type has one producer and one consumer, it is indirection, not abstraction.
- Interfaces come from need (multiple implementations), not from prediction.
- When in doubt, do not add it. It is easier to add an abstraction later when the need is clear than to remove one after it has spread through the codebase.

## Further reading

These works and our own lessons shaped the above guidelines.

- [The Wrong Abstraction](https://sandimetz.com/blog/2016/1/20/the-wrong-abstraction) - Sandi Metz. The wrong abstraction is worse than duplication. If you find yourself passing parameters and adding conditional paths through shared code, inline it back into every caller and let the duplication show you what the right abstraction is.
- [Write code that is easy to delete, not easy to extend](https://programmingisterrible.com/post/139222674273/write-code-that-is-easy-to-delete-not-easy-to) - tef. Every abstraction is a bet on the future. Optimize for how cheaply you can remove code when the bet is wrong, not for how easily you can extend it when the bet is right.
- [Goodbye, Clean Code](https://overreacted.io/goodbye-clean-code/) - Dan Abramov. A refactoring that removes duplication can look cleaner while making the code harder to change. Clean-looking and easy-to-change are not the same thing.
- [A Philosophy of Software Design](https://www.amazon.com/Philosophy-Software-Design-John-Ousterhout/dp/1732102201) - John Ousterhout. Good abstractions are deep: simple interface, complex implementation. A "false abstraction" omits important details while appearing simple, and is worse than no abstraction at all. ([Summary by Pragmatic Engineer](https://blog.pragmaticengineer.com/a-philosophy-of-software-design-review/))
- [Simplicity is Complicated](https://go.dev/talks/2015/simplicity-is-complicated.slide) - Rob Pike. Go-specific. Fewer orthogonal concepts that compose predictably beat many overlapping ones. Features were left out of Go deliberately; the same discipline applies to your own code.
@@ -123,7 +123,6 @@ if err := router.Handle("/api/v1/things", handler.New(
    Description: "This endpoint creates a thing",
    Request: new(types.PostableThing),
    RequestContentType: "application/json",
    RequestQuery: new(types.QueryableThing),
    Response: new(types.GettableThing),
    ResponseContentType: "application/json",
    SuccessStatusCode: http.StatusCreated,

@@ -156,8 +155,6 @@ The `handler.New` function ties the HTTP handler to OpenAPI metadata via `OpenAP
- **Request / RequestContentType**:
  - `Request` is a Go type that describes the request body or form.
  - `RequestContentType` is usually `"application/json"` or `"application/x-www-form-urlencoded"` (for callbacks like SAML).
- **RequestQuery**:
  - `RequestQuery` is a Go type that descirbes query url params.
- **RequestExamples**: An array of `handler.OpenAPIExample` that provide concrete request payloads in the generated spec. See [Adding request examples](#adding-request-examples) below.
- **Response / ResponseContentType**:
  - `Response` is the Go type for the successful response payload.
126  docs/contributing/go/packages.md  (Normal file)
@@ -0,0 +1,126 @@

# Packages

All shared Go code in SigNoz lives under `pkg/`. Each package represents a distinct domain concept and exposes a clear public interface. This guide covers the conventions for creating, naming, and organising packages so the codebase stays consistent as it grows.

## How should I name a package?

Use short, lowercase, single-word names. No underscores or camelCase (`querier`, `cache`, `authz`, not `query_builder` or `dataStore`).

Names must be **domain-specific**. A package name should tell you what problem domain it deals with, not what data structure it wraps. Prefer `alertmanager` over `manager`, `licensing` over `checker`.

Avoid generic names like `util`, `helpers`, `common`, `misc`, or `base`. If you can't name it, the code probably belongs in an existing package.

## When should I create a new package?

Create a new package when:

- The functionality represents a **distinct domain concept** (e.g., `authz`, `licensing`, `cache`).
- Two or more other packages would import it; it serves as shared infrastructure.
- The code has a clear public interface that can stand on its own.

Do **not** create a new package when:

- There is already a package that covers the same domain. Extend the existing package instead.
- The code is only used in one place. Keep it local to the caller.
- You are splitting purely for file size. Use multiple files within the same package instead.

## How should I lay out a package?

A typical package looks like:

```
pkg/cache/
├── cache.go      # Public interface + exported types
├── config.go     # Configuration types if needed
├── memorycache/  # Implementation sub-package
├── rediscache/   # Another implementation
└── cachetest/    # Test helpers for consumers
```

Follow these rules:

1. **Interface-first file**: The file matching the package name (e.g., `cache.go` in `pkg/cache/`) should define the public interface and core exported types. Keep implementation details out of this file.

2. **One responsibility per file**: Name files after what they contain (`config.go`, `handler.go`, `service.go`), not after the package name. If a package merges two concerns, prefix files to group them (e.g., `memory_store.go`, `redis_store.go` in a storage package).

3. **Sub-packages for implementations**: When a package defines an interface with multiple implementations, put each implementation in its own sub-package (`memorycache/`, `rediscache/`). This keeps the parent package import-free of implementation dependencies.

4. **Test helpers in `{pkg}test/`**: If consumers need test mocks or builders, put them in a `{pkg}test/` sub-package (e.g., `cachetest/`, `sqlstoretest/`). This avoids polluting the main package with test-only code.

5. **Test files stay alongside source**: Unit tests go in `_test.go` files next to the code they test, in the same package.
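
For rule 1, the interface-first file in the `pkg/cache/` example above might look roughly like the sketch below; the methods are illustrative and are not the actual `cache.Cache` interface.

```go
// pkg/cache/cache.go (illustrative): only the public contract lives here.
// Implementations go in sub-packages such as memorycache/ and rediscache/.
package cache

import (
    "context"
    "time"
)

type Cache interface {
    Get(ctx context.Context, key string) ([]byte, error)
    Set(ctx context.Context, key string, value []byte, ttl time.Duration) error
    Delete(ctx context.Context, key string) error
}
```
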
## How should I name symbols?

### Exported symbols

- **Interfaces**: For single-method interfaces, follow the standard `-er` suffix convention (`Reader`, `Writer`, `Closer`). For multi-method interfaces, use clear nouns (`Cache`, `Store`, `Provider`).
- **Constructors**: `New<Type>(...)` (e.g., `NewMemoryCache()`).
- **Avoid stutter**: Since callers qualify with the package name, don't repeat it. Write `cache.Cache`, not `cache.CacheInterface`. Write `authz.FromRole`, not `authz.AuthzFromRole`.

### Unexported symbols

- Struct receivers: one or two characters (`c`, `f`, `br`).
- Helper functions: descriptive lowercase names (`parseToken`, `buildQuery`).

### Constants

- Use `PascalCase` for exported constants.
- When merging files from different origins into one package, watch out for **name collisions** across files. Prefix to disambiguate when two types share a natural name.

## How should I organise imports?

Group imports in three blocks separated by blank lines:

```go
import (
    // 1. Standard library
    "fmt"
    "net/http"

    // 2. External dependencies
    "github.com/gorilla/mux"

    // 3. Internal
    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/types"
)
```

Never introduce circular imports. If package A needs package B and B needs A, extract the shared types into a third package (often under `pkg/types/`).

## Where do shared types go?

Most types belong in `pkg/types/` under a domain-specific sub-package (e.g., `pkg/types/ruletypes`, `pkg/types/authtypes`).

Do not put domain logic in `pkg/types/`. Only data structures, constants, and simple methods.
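
For example, a types sub-package might hold nothing more than the sketch below; the `thingtypes` package and its fields are invented for illustration.

```go
// pkg/types/thingtypes/thing.go (illustrative)
package thingtypes

import "time"

const StatusActive = "active"

// Thing is plain data shared across packages.
type Thing struct {
    ID        string
    Name      string
    Status    string
    CreatedAt time.Time
}

// IsActive is about as much behaviour as belongs here; anything heavier
// lives in a domain package, not in pkg/types.
func (t Thing) IsActive() bool {
    return t.Status == StatusActive
}
```
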
## How do I merge or move packages?

When two packages are tightly coupled (one imports the other's constants, or they cover the same domain), merge them:

1. Pick a domain-specific name for the combined package.
2. Prefix files to preserve origin (e.g., `memory_store.go`, `redis_store.go`).
3. Resolve symbol conflicts explicitly; rename with a prefix rather than silently shadowing.
4. Update all consumers in a single change.
5. Delete the old packages. Do not leave behind re-export shims.
6. Verify with `go build ./...`, `go test ./<new-pkg>/...`, and `go vet ./...`.

## When should I add documentation?

Add a `doc.go` with a package-level comment for any package that is non-trivial or has multiple consumers. Keep it to 1–3 sentences:

```go
// Package cache provides a caching interface with pluggable backends
// for in-memory and Redis-based storage.
package cache
```

## What should I remember?

- Package names are domain-specific and lowercase. Never generic names like `util` or `common`.
- The file matching the package name (e.g., `cache.go`) defines the public interface. Implementation details go elsewhere.
- Never introduce circular imports. Extract shared types into `pkg/types/` when needed.
- Watch for symbol name collisions when merging packages; prefix to disambiguate.
- Put test helpers in a `{pkg}test/` sub-package, not in the main package.
- Before submitting, verify with `go build ./...`, `go test ./<your-pkg>/...`, and `go vet ./...`.
- Update all consumers when you rename or move symbols.
@@ -8,4 +8,15 @@ We adhere to three primary style guides as our foundation:
- [Code Review Comments](https://go.dev/wiki/CodeReviewComments) - For understanding common comments in code reviews
- [Google Style Guide](https://google.github.io/styleguide/go/) - Additional practices from Google

We **recommend** (almost enforce) reviewing these guides before contributing to the codebase. They provide valuable insights into writing idiomatic Go code and will help you understand our approach to backend development. In addition, we have a few additional rules that make certain areas stricter than the above which can be found in area-specific files in this package.
We **recommend** (almost enforce) reviewing these guides before contributing to the codebase. They provide valuable insights into writing idiomatic Go code and will help you understand our approach to backend development. In addition, we have a few additional rules that make certain areas stricter than the above which can be found in area-specific files in this package:

- [Abstractions](abstractions.md) - When to introduce new types and intermediate representations
- [Errors](errors.md) - Structured error handling
- [Endpoint](endpoint.md) - HTTP endpoint patterns
- [Flagger](flagger.md) - Feature flag patterns
- [Handler](handler.md) - HTTP handler patterns
- [Integration](integration.md) - Integration testing
- [Provider](provider.md) - Dependency injection and provider patterns
- [Packages](packages.md) - Naming, layout, and conventions for `pkg/` packages
- [Service](service.md) - Managed service lifecycle with `factory.Service`
- [SQL](sql.md) - Database and SQL patterns
269  docs/contributing/go/service.md  (Normal file)
@@ -0,0 +1,269 @@

# Service

A service is a component with a managed lifecycle: it starts, runs for the lifetime of the application, and stops gracefully.

Services are distinct from [providers](provider.md). A provider adapts an external dependency behind an interface. A service has a managed lifecycle that is tied to the lifetime of the application.

## When do you need a service?

You need a service when your component needs to do work that outlives a single method call:

- **Periodic work**: polling an external system, garbage-collecting expired data, syncing state on an interval.
- **Graceful shutdown**: holding resources (connections, caches, buffers) that must be flushed or closed before the process exits.
- **Blocking on readiness**: waiting for an external dependency to become available before the application can proceed.

If your component only responds to calls and holds no state that requires cleanup, it is a provider, not a service. If it does both (responds to calls *and* needs a lifecycle), embed `factory.Service` in the provider interface; see [How to create a service](#how-to-create-a-service).

## The interface

The `factory.Service` interface in `pkg/factory/service.go` defines two methods:

```go
type Service interface {
    // Starts a service. It should block and should not return until the service is stopped or it fails.
    Start(context.Context) error
    // Stops a service.
    Stop(context.Context) error
}
```

`Start` **must block**. It should not return until the service is stopped (returning `nil`) or something goes wrong (returning an error). If `Start` returns an error, the entire application shuts down.

`Stop` should cause `Start` to unblock and return. It must be safe to call from a different goroutine than the one running `Start`.

## Shutdown coordination

Every service uses a `stopC chan struct{}` to coordinate shutdown:

- **Constructor**: `stopC: make(chan struct{})`
- **Start**: blocks on `<-stopC` (or uses it in a `select` loop)
- **Stop**: `close(stopC)` to unblock `Start`

This is the standard pattern. Do not use `context.WithCancel` or other mechanisms for service-level shutdown coordination. See the examples in the next section.
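
Put together, the skeleton is small. The `worker` struct below is illustrative (real services also carry settings and dependencies), and fuller examples follow in the next section:

```go
type worker struct {
    stopC chan struct{}
}

func newWorker() *worker {
    return &worker{stopC: make(chan struct{})}
}

// Start blocks until Stop closes stopC.
func (w *worker) Start(ctx context.Context) error {
    <-w.stopC
    return nil
}

// Stop unblocks Start; it is safe to call from another goroutine.
func (w *worker) Stop(ctx context.Context) error {
    close(w.stopC)
    return nil
}
```
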
## Service shapes

Two shapes recur across the codebase, implemented by convention rather than base classes. These are not exhaustive; if a new shape is needed, bring it up for discussion before going ahead with the implementation.

### Idle service

The service does work during startup or shutdown but has nothing to do while running. `Start` blocks on `<-stopC`. `Stop` closes `stopC` and optionally does cleanup.

The JWT tokenizer (`pkg/tokenizer/jwttokenizer/provider.go`) is a good example. It validates and creates tokens on demand via method calls, but has no periodic work to do. It still needs the service lifecycle so the registry can manage its lifetime:

```go
// pkg/tokenizer/jwttokenizer/provider.go

func (provider *provider) Start(ctx context.Context) error {
    <-provider.stopC
    return nil
}

func (provider *provider) Stop(ctx context.Context) error {
    close(provider.stopC)
    return nil
}
```

The instrumentation SDK (`pkg/instrumentation/sdk.go`) is idle while running but does real cleanup in `Stop`, shutting down its OpenTelemetry tracer and meter providers:

```go
// pkg/instrumentation/sdk.go

func (i *SDK) Start(ctx context.Context) error {
    <-i.startCh
    return nil
}

func (i *SDK) Stop(ctx context.Context) error {
    close(i.startCh)
    return errors.Join(
        i.sdk.Shutdown(ctx),
        i.meterProviderShutdownFunc(ctx),
    )
}
```

### Scheduled service

The service runs an operation repeatedly on a fixed interval. `Start` runs a ticker loop with a `select` on `stopC` and the ticker channel.

The opaque tokenizer (`pkg/tokenizer/opaquetokenizer/provider.go`) garbage-collects expired tokens and flushes cached last-observed-at timestamps to the database on a configurable interval:

```go
// pkg/tokenizer/opaquetokenizer/provider.go

func (provider *provider) Start(ctx context.Context) error {
    ticker := time.NewTicker(provider.config.Opaque.GC.Interval)
    defer ticker.Stop()

    for {
        select {
        case <-provider.stopC:
            return nil
        case <-ticker.C:
            orgs, err := provider.orgGetter.ListByOwnedKeyRange(ctx)
            if err != nil {
                provider.settings.Logger().ErrorContext(ctx, "failed to get orgs data", "error", err)
                continue
            }

            for _, org := range orgs {
                if err := provider.gc(ctx, org); err != nil {
                    provider.settings.Logger().ErrorContext(ctx, "failed to garbage collect tokens", "error", err, "org_id", org.ID)
                }

                if err := provider.flushLastObservedAt(ctx, org); err != nil {
                    provider.settings.Logger().ErrorContext(ctx, "failed to flush tokens", "error", err, "org_id", org.ID)
                }
            }
        }
    }
}
```

Its `Stop` does a final gc and flush before returning, so no data is lost on shutdown:

```go
// pkg/tokenizer/opaquetokenizer/provider.go

func (provider *provider) Stop(ctx context.Context) error {
    close(provider.stopC)

    orgs, err := provider.orgGetter.ListByOwnedKeyRange(ctx)
    if err != nil {
        return err
    }

    for _, org := range orgs {
        if err := provider.gc(ctx, org); err != nil {
            provider.settings.Logger().ErrorContext(ctx, "failed to garbage collect tokens", "error", err, "org_id", org.ID)
        }

        if err := provider.flushLastObservedAt(ctx, org); err != nil {
            provider.settings.Logger().ErrorContext(ctx, "failed to flush tokens", "error", err, "org_id", org.ID)
        }
    }

    return nil
}
```

The key points:

- In the loop, `select` on `stopC` and the ticker. Errors in iterations are logged but do not cause the service to return (which would shut down the application).
- Only return an error from `Start` if the failure is unrecoverable.
- Use `Stop` to flush or drain any in-memory state before the process exits.

## How to create a service

There are two cases: a standalone service and a provider that is also a service.

### Standalone service

A standalone service only has the `factory.Service` lifecycle, i.e. it does not serve as a dependency for other packages. The user reconciliation service is an example.

1. Define the service interface in your package. Embed `factory.Service`:

```go
// pkg/modules/user/service.go
package user

type Service interface {
    factory.Service
}
```

2. Create the implementation in an `impl` sub-package. Use an unexported struct with an exported constructor that returns the interface:

```go
// pkg/modules/user/impluser/service.go
package impluser

type service struct {
    settings factory.ScopedProviderSettings
    // ... dependencies ...
    stopC chan struct{}
}

func NewService(
    providerSettings factory.ProviderSettings,
    // ... dependencies ...
) user.Service {
    return &service{
        settings: factory.NewScopedProviderSettings(providerSettings, "go.signoz.io/pkg/modules/user"),
        // ... dependencies ...
        stopC: make(chan struct{}),
    }
}

func (s *service) Start(ctx context.Context) error { ... }
func (s *service) Stop(ctx context.Context) error { ... }
```

### Provider that is also a service

Many providers need a managed lifecycle: they poll, sync, or garbage-collect in the background. In this case, embed `factory.Service` in the provider interface. The implementation satisfies both the provider methods and `Start`/`Stop`.

```go
// pkg/tokenizer/tokenizer.go
package tokenizer

type Tokenizer interface {
    factory.Service
    CreateToken(context.Context, *authtypes.Identity, map[string]string) (*authtypes.Token, error)
    GetIdentity(context.Context, string) (*authtypes.Identity, error)
    // ... other methods ...
}
```

The implementation (e.g. `pkg/tokenizer/opaquetokenizer/provider.go`) implements `Start`, `Stop`, and all the provider methods on the same struct. See the [provider guide](provider.md) for how to set up the factory, config, and constructor. The `stopC` channel and `Start`/`Stop` methods follow the same patterns described above.

## How to wire it up

Wiring happens in `pkg/signoz/signoz.go`.

### 1. Instantiate the service

For a standalone service, call the constructor directly:

```go
userService := impluser.NewService(providerSettings, store, module, orgGetter, authz, config.User.Root)
```

For a provider that is also a service, use `factory.NewProviderFromNamedMap` as described in the [provider guide](provider.md). The returned value already implements `factory.Service`.

### 2. Register in the registry

Wrap the service with `factory.NewNamedService` and pass it to `factory.NewRegistry`:

```go
registry, err := factory.NewRegistry(
    instrumentation.Logger(),
    // ... other services ...
    factory.NewNamedService(factory.MustNewName("user"), userService),
)
```

The name must be unique across all services. The registry handles the rest:

- **Start**: launches all services concurrently in goroutines.
- **Wait**: blocks until a service returns an error, the context is cancelled, or a SIGINT/SIGTERM is received. Any service error triggers application shutdown.
- **Stop**: stops all services concurrently and collects errors via `errors.Join`.

You do not call `Start` or `Stop` on individual services. The registry does it.
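
For orientation, the application-level flow is roughly the sketch below. The method names and signatures on the registry (`Start`, `Wait`, `Stop`, each taking a context) are assumptions inferred from the behaviour described above; check `pkg/factory` and `pkg/signoz/signoz.go` for the real API.

```go
// Assumed shape of the startup flow; signatures are illustrative.
func run(ctx context.Context, registry *factory.Registry) error {
    if err := registry.Start(ctx); err != nil { // launch all services in goroutines
        return err
    }
    if err := registry.Wait(ctx); err != nil { // block until error, cancellation, or SIGINT/SIGTERM
        return err
    }
    return registry.Stop(ctx) // stop all services; errors are joined
}
```
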
## What should I remember?

- A service has a managed lifecycle: `Start` blocks, `Stop` unblocks it.
- Use `stopC chan struct{}` for shutdown coordination. `close(stopC)` in `Stop`, `<-stopC` in `Start`.
- Service shapes: idle (block on `stopC`) and scheduled (ticker loop with `select`).
- Unexported struct, exported `NewService` constructor returning the interface.
- First constructor parameter is `factory.ProviderSettings`. Create scoped settings with `factory.NewScopedProviderSettings`.
- Register in `factory.Registry` with `factory.NewNamedService`. The registry starts and stops everything.
- Only return an error from `Start` if the failure is unrecoverable. Log and continue for transient errors in polling loops.

## Further reading

- [Google Guava - ServiceExplained](https://github.com/google/guava/wiki/ServiceExplained) - the service lifecycle pattern takes inspiration from this
- [OpenTelemetry Collector](https://github.com/open-telemetry/opentelemetry-collector) - Worth studying for its approach to building composable components
@@ -98,16 +98,20 @@ func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.
    return provider.pkgAuthzService.ListByOrgIDAndNames(ctx, orgID, names)
}

func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
    return provider.pkgAuthzService.Grant(ctx, orgID, name, subject)
func (provider *provider) ListByOrgIDAndIDs(ctx context.Context, orgID valuer.UUID, ids []valuer.UUID) ([]*roletypes.Role, error) {
    return provider.pkgAuthzService.ListByOrgIDAndIDs(ctx, orgID, ids)
}

func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
    return provider.pkgAuthzService.ModifyGrant(ctx, orgID, existingRoleName, updatedRoleName, subject)
func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, names []string, subject string) error {
    return provider.pkgAuthzService.Grant(ctx, orgID, names, subject)
}

func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
    return provider.pkgAuthzService.Revoke(ctx, orgID, name, subject)
func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleNames []string, updatedRoleNames []string, subject string) error {
    return provider.pkgAuthzService.ModifyGrant(ctx, orgID, existingRoleNames, updatedRoleNames, subject)
}

func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, names []string, subject string) error {
    return provider.pkgAuthzService.Revoke(ctx, orgID, names, subject)
}

func (provider *provider) CreateManagedRoles(ctx context.Context, orgID valuer.UUID, managedRoles []*roletypes.Role) error {
@@ -2,7 +2,11 @@
  * ESLint Configuration for SigNoz Frontend
  */
 module.exports = {
-	ignorePatterns: ['src/parser/*.ts', 'scripts/update-registry.js'],
+	ignorePatterns: [
+		'src/parser/*.ts',
+		'scripts/update-registry.js',
+		'scripts/generate-permissions-type.js',
+	],
 	env: {
 		browser: true,
 		es2021: true,

@@ -19,6 +19,8 @@ const config: Config.InitialOptions = {
 		'^.*/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
 		'^@signozhq/icons$':
 			'<rootDir>/node_modules/@signozhq/icons/dist/index.esm.js',
+		'^react-syntax-highlighter/dist/esm/(.*)$':
+			'<rootDir>/node_modules/react-syntax-highlighter/dist/cjs/$1',
 	},
 	globals: {
 		extensionsToTreatAsEsm: ['.ts'],

@@ -19,7 +19,8 @@
 		"commitlint": "commitlint --edit $1",
 		"test": "jest",
 		"test:changedsince": "jest --changedSince=main --coverage --silent",
-		"generate:api": "orval --config ./orval.config.ts && sh scripts/post-types-generation.sh"
+		"generate:api": "orval --config ./orval.config.ts && sh scripts/post-types-generation.sh",
+		"generate:permissions-type": "node scripts/generate-permissions-type.js"
 	},
 	"engines": {
 		"node": ">=16.15.0"
@@ -214,7 +215,7 @@
 		"@types/react-redux": "^7.1.11",
 		"@types/react-resizable": "3.0.3",
 		"@types/react-router-dom": "^5.1.6",
-		"@types/react-syntax-highlighter": "15.5.7",
+		"@types/react-syntax-highlighter": "15.5.13",
 		"@types/redux-mock-store": "1.0.4",
 		"@types/styled-components": "^5.1.4",
 		"@types/uuid": "^8.3.1",

13
frontend/scripts/extract-md-languages.sh
Executable file
@@ -0,0 +1,13 @@
|
||||
#!/usr/bin/env bash
|
||||
# Extracts unique fenced code block language identifiers from all .md files under frontend/src/
|
||||
# Usage: bash frontend/scripts/extract-md-languages.sh
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
SRC_DIR="$SCRIPT_DIR/../src"
|
||||
|
||||
grep -roh '```[a-zA-Z0-9_+-]*' "$SRC_DIR" --include='*.md' \
|
||||
| sed 's/^```//' \
|
||||
| grep -v '^$' \
|
||||
| sort -u
|
||||
199
frontend/scripts/generate-permissions-type.js
Executable file
@@ -0,0 +1,199 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { execSync } = require('child_process');
|
||||
const axios = require('axios');
|
||||
|
||||
const PERMISSIONS_TYPE_FILE = path.join(
|
||||
__dirname,
|
||||
'../src/hooks/useAuthZ/permissions.type.ts',
|
||||
);
|
||||
|
||||
const SIGNOZ_INTEGRATION_IMAGE = 'signoz:integration';
|
||||
const LOCAL_BACKEND_URL = 'http://localhost:8080';
|
||||
|
||||
function log(message) {
|
||||
console.log(`[generate-permissions-type] ${message}`);
|
||||
}
|
||||
|
||||
function getBackendUrlFromDocker() {
|
||||
try {
|
||||
const output = execSync(
|
||||
`docker ps --filter "ancestor=${SIGNOZ_INTEGRATION_IMAGE}" --format "{{.Ports}}"`,
|
||||
{ encoding: 'utf8', stdio: ['pipe', 'pipe', 'pipe'] },
|
||||
).trim();
|
||||
|
||||
if (!output) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const portMatch = output.match(/0\.0\.0\.0:(\d+)->8080\/tcp/);
|
||||
if (portMatch) {
|
||||
return `http://localhost:${portMatch[1]}`;
|
||||
}
|
||||
|
||||
const ipv6Match = output.match(/:::(\d+)->8080\/tcp/);
|
||||
if (ipv6Match) {
|
||||
return `http://localhost:${ipv6Match[1]}`;
|
||||
}
|
||||
} catch (err) {
|
||||
log(`Warning: Could not get port from docker: ${err.message}`);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
async function checkBackendHealth(url, maxAttempts = 3, delayMs = 1000) {
|
||||
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
|
||||
try {
|
||||
await axios.get(`${url}/api/v1/health`, {
|
||||
timeout: 5000,
|
||||
validateStatus: (status) => status === 200,
|
||||
});
|
||||
return true;
|
||||
} catch (err) {
|
||||
if (attempt < maxAttempts) {
|
||||
await new Promise((r) => setTimeout(r, delayMs));
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
async function discoverBackendUrl() {
|
||||
const dockerUrl = getBackendUrlFromDocker();
|
||||
if (dockerUrl) {
|
||||
log(`Found ${SIGNOZ_INTEGRATION_IMAGE} container, trying ${dockerUrl}...`);
|
||||
if (await checkBackendHealth(dockerUrl)) {
|
||||
log(`Backend found at ${dockerUrl} (from py-test-setup)`);
|
||||
return dockerUrl;
|
||||
}
|
||||
log(`Backend at ${dockerUrl} is not responding`);
|
||||
}
|
||||
|
||||
log(`Trying local backend at ${LOCAL_BACKEND_URL}...`);
|
||||
if (await checkBackendHealth(LOCAL_BACKEND_URL)) {
|
||||
log(`Backend found at ${LOCAL_BACKEND_URL}`);
|
||||
return LOCAL_BACKEND_URL;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
async function fetchResources(backendUrl) {
|
||||
log('Fetching resources from API...');
|
||||
const resourcesUrl = `${backendUrl}/api/v1/authz/resources`;
|
||||
|
||||
const { data: response } = await axios.get(resourcesUrl);
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
function transformResponse(apiResponse) {
|
||||
if (!apiResponse.data) {
|
||||
throw new Error('Invalid API response: missing data field');
|
||||
}
|
||||
|
||||
const { resources, relations } = apiResponse.data;
|
||||
|
||||
return {
|
||||
status: apiResponse.status || 'success',
|
||||
data: {
|
||||
resources: resources,
|
||||
relations: relations,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function generateTypeScriptFile(data) {
|
||||
const resourcesStr = data.data.resources
|
||||
.map(
|
||||
(r) =>
|
||||
`\t\t\t{\n\t\t\t\tname: '${r.name}',\n\t\t\t\ttype: '${r.type}',\n\t\t\t}`,
|
||||
)
|
||||
.join(',\n');
|
||||
|
||||
const relationsStr = Object.entries(data.data.relations)
|
||||
.map(
|
||||
([type, relations]) =>
|
||||
`\t\t\t${type}: [${relations.map((r) => `'${r}'`).join(', ')}]`,
|
||||
)
|
||||
.join(',\n');
|
||||
|
||||
return `// AUTO GENERATED FILE - DO NOT EDIT - GENERATED BY scripts/generate-permissions-type
|
||||
export default {
|
||||
\tstatus: '${data.status}',
|
||||
\tdata: {
|
||||
\t\tresources: [
|
||||
${resourcesStr}
|
||||
\t\t],
|
||||
\t\trelations: {
|
||||
${relationsStr}
|
||||
\t\t},
|
||||
\t},
|
||||
} as const;
|
||||
`;
|
||||
}
|
||||
|
||||
async function main() {
|
||||
try {
|
||||
log('Starting permissions type generation...');
|
||||
|
||||
const backendUrl = await discoverBackendUrl();
|
||||
|
||||
if (!backendUrl) {
|
||||
console.error('\n' + '='.repeat(80));
|
||||
console.error('ERROR: No running SigNoz backend found!');
|
||||
console.error('='.repeat(80));
|
||||
console.error(
|
||||
'\nThe permissions type generator requires a running SigNoz backend.',
|
||||
);
|
||||
console.error('\nFor local development, start the backend with:');
|
||||
console.error(' make go-run-enterprise');
|
||||
console.error(
|
||||
'\nFor CI or integration testing, start the test environment with:',
|
||||
);
|
||||
console.error(' make py-test-setup');
|
||||
console.error(
|
||||
'\nIf running in CI and seeing this error, check that the py-test-setup',
|
||||
);
|
||||
console.error('step completed successfully before this step runs.');
|
||||
console.error('='.repeat(80) + '\n');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
log('Fetching resources...');
|
||||
const apiResponse = await fetchResources(backendUrl);
|
||||
|
||||
log('Transforming response...');
|
||||
const transformed = transformResponse(apiResponse);
|
||||
|
||||
log('Generating TypeScript file...');
|
||||
const content = generateTypeScriptFile(transformed);
|
||||
|
||||
log(`Writing to ${PERMISSIONS_TYPE_FILE}...`);
|
||||
fs.writeFileSync(PERMISSIONS_TYPE_FILE, content, 'utf8');
|
||||
|
||||
const rootDir = path.join(__dirname, '../..');
|
||||
const relativePath = path.relative(
|
||||
path.join(rootDir, 'frontend'),
|
||||
PERMISSIONS_TYPE_FILE,
|
||||
);
|
||||
log('Linting generated file...');
|
||||
execSync(`cd frontend && yarn eslint --fix ${relativePath}`, {
|
||||
cwd: rootDir,
|
||||
stdio: 'inherit',
|
||||
});
|
||||
|
||||
log('Successfully generated permissions.type.ts');
|
||||
} catch (error) {
|
||||
log(`Error: ${error.message}`);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
if (require.main === module) {
|
||||
main();
|
||||
}
|
||||
|
||||
module.exports = { main };
|
||||
35
frontend/scripts/validate-md-languages.sh
Executable file
@@ -0,0 +1,35 @@
|
||||
#!/usr/bin/env bash
|
||||
# Validates that all fenced code block languages used in .md files are registered
|
||||
# in the syntax highlighter.
|
||||
# Usage: bash frontend/scripts/validate-md-languages.sh
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
SYNTAX_HIGHLIGHTER="$SCRIPT_DIR/../src/components/MarkdownRenderer/syntaxHighlighter.ts"
|
||||
|
||||
# Get all languages used in .md files
|
||||
md_languages=$("$SCRIPT_DIR/extract-md-languages.sh")
|
||||
|
||||
# Get all registered languages from syntaxHighlighter.ts
|
||||
registered_languages=$(grep -oP "registerLanguage\('\K[^']+" "$SYNTAX_HIGHLIGHTER" | sort -u)
|
||||
|
||||
missing_languages=()
|
||||
|
||||
for lang in $md_languages; do
|
||||
if ! echo "$registered_languages" | grep -qx "$lang"; then
|
||||
missing_languages+=("$lang")
|
||||
fi
|
||||
done
|
||||
|
||||
if [ ${#missing_languages[@]} -gt 0 ]; then
|
||||
echo "Error: The following languages are used in .md files but not registered in syntaxHighlighter.ts:"
|
||||
for lang in "${missing_languages[@]}"; do
|
||||
echo " - $lang"
|
||||
done
|
||||
echo ""
|
||||
echo "Please add them to: frontend/src/components/MarkdownRenderer/syntaxHighlighter.ts"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "All markdown code block languages are registered in syntaxHighlighter.ts"
|
||||
@@ -20,11 +20,8 @@ import { useMutation, useQuery } from 'react-query';
|
||||
import type { BodyType, ErrorType } from '../../../generatedAPIInstance';
|
||||
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
|
||||
import type {
|
||||
HandleExportRawDataGETParams,
|
||||
HandleExportRawDataPOSTParams,
|
||||
ListPromotedAndIndexedPaths200,
|
||||
PromotetypesPromotePathDTO,
|
||||
Querybuildertypesv5QueryRangeRequestDTO,
|
||||
RenderErrorResponseDTO,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
@@ -32,206 +29,6 @@ type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
|
||||
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
|
||||
|
||||
/**
|
||||
* This endpoints allows simple query exporting raw data for traces and logs
|
||||
* @summary Export raw data
|
||||
*/
|
||||
export const handleExportRawDataGET = (
|
||||
params?: HandleExportRawDataGETParams,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<string>({
|
||||
url: `/api/v1/export_raw_data`,
|
||||
method: 'GET',
|
||||
params,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getHandleExportRawDataGETQueryKey = (
|
||||
params?: HandleExportRawDataGETParams,
|
||||
) => {
|
||||
return [`/api/v1/export_raw_data`, ...(params ? [params] : [])] as const;
|
||||
};
|
||||
|
||||
export const getHandleExportRawDataGETQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof handleExportRawDataGET>>,
|
||||
TError = ErrorType<RenderErrorResponseDTO>
|
||||
>(
|
||||
params?: HandleExportRawDataGETParams,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof handleExportRawDataGET>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey =
|
||||
queryOptions?.queryKey ?? getHandleExportRawDataGETQueryKey(params);
|
||||
|
||||
const queryFn: QueryFunction<
|
||||
Awaited<ReturnType<typeof handleExportRawDataGET>>
|
||||
> = ({ signal }) => handleExportRawDataGET(params, signal);
|
||||
|
||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof handleExportRawDataGET>>,
|
||||
TError,
|
||||
TData
|
||||
> & { queryKey: QueryKey };
|
||||
};
|
||||
|
||||
export type HandleExportRawDataGETQueryResult = NonNullable<
|
||||
Awaited<ReturnType<typeof handleExportRawDataGET>>
|
||||
>;
|
||||
export type HandleExportRawDataGETQueryError = ErrorType<RenderErrorResponseDTO>;
|
||||
|
||||
/**
|
||||
* @summary Export raw data
|
||||
*/
|
||||
|
||||
export function useHandleExportRawDataGET<
|
||||
TData = Awaited<ReturnType<typeof handleExportRawDataGET>>,
|
||||
TError = ErrorType<RenderErrorResponseDTO>
|
||||
>(
|
||||
params?: HandleExportRawDataGETParams,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof handleExportRawDataGET>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getHandleExportRawDataGETQueryOptions(params, options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Export raw data
|
||||
*/
|
||||
export const invalidateHandleExportRawDataGET = async (
|
||||
queryClient: QueryClient,
|
||||
params?: HandleExportRawDataGETParams,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getHandleExportRawDataGETQueryKey(params) },
|
||||
options,
|
||||
);
|
||||
|
||||
return queryClient;
|
||||
};
|
||||
|
||||
/**
|
||||
* This endpoints allows complex query exporting raw data for traces and logs
|
||||
* @summary Export raw data
|
||||
*/
|
||||
export const handleExportRawDataPOST = (
|
||||
querybuildertypesv5QueryRangeRequestDTO: BodyType<Querybuildertypesv5QueryRangeRequestDTO>,
|
||||
params?: HandleExportRawDataPOSTParams,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<string>({
|
||||
url: `/api/v1/export_raw_data`,
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: querybuildertypesv5QueryRangeRequestDTO,
|
||||
params,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getHandleExportRawDataPOSTMutationOptions = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
|
||||
TError,
|
||||
{
|
||||
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
|
||||
params?: HandleExportRawDataPOSTParams;
|
||||
},
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
|
||||
TError,
|
||||
{
|
||||
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
|
||||
params?: HandleExportRawDataPOSTParams;
|
||||
},
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['handleExportRawDataPOST'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
|
||||
{
|
||||
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
|
||||
params?: HandleExportRawDataPOSTParams;
|
||||
}
|
||||
> = (props) => {
|
||||
const { data, params } = props ?? {};
|
||||
|
||||
return handleExportRawDataPOST(data, params);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type HandleExportRawDataPOSTMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof handleExportRawDataPOST>>
|
||||
>;
|
||||
export type HandleExportRawDataPOSTMutationBody = BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
|
||||
export type HandleExportRawDataPOSTMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||
|
||||
/**
|
||||
* @summary Export raw data
|
||||
*/
|
||||
export const useHandleExportRawDataPOST = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
|
||||
TError,
|
||||
{
|
||||
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
|
||||
params?: HandleExportRawDataPOSTParams;
|
||||
},
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
|
||||
TError,
|
||||
{
|
||||
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
|
||||
params?: HandleExportRawDataPOSTParams;
|
||||
},
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getHandleExportRawDataPOSTMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* This endpoints promotes and indexes paths
|
||||
* @summary Promote and index paths
|
||||
|
||||
973
frontend/src/api/generated/services/serviceaccount/index.ts
Normal file
@@ -0,0 +1,973 @@
|
||||
/**
|
||||
* ! Do not edit manually
|
||||
* * The file has been auto-generated using Orval for SigNoz
|
||||
* * regenerate with 'yarn generate:api'
|
||||
* SigNoz
|
||||
*/
|
||||
import type {
|
||||
InvalidateOptions,
|
||||
MutationFunction,
|
||||
QueryClient,
|
||||
QueryFunction,
|
||||
QueryKey,
|
||||
UseMutationOptions,
|
||||
UseMutationResult,
|
||||
UseQueryOptions,
|
||||
UseQueryResult,
|
||||
} from 'react-query';
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
|
||||
import type { BodyType, ErrorType } from '../../../generatedAPIInstance';
|
||||
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
|
||||
import type {
|
||||
CreateServiceAccount201,
|
||||
CreateServiceAccountKey201,
|
||||
CreateServiceAccountKeyPathParameters,
|
||||
DeleteServiceAccountPathParameters,
|
||||
GetServiceAccount200,
|
||||
GetServiceAccountPathParameters,
|
||||
ListServiceAccountKeys200,
|
||||
ListServiceAccountKeysPathParameters,
|
||||
ListServiceAccounts200,
|
||||
RenderErrorResponseDTO,
|
||||
RevokeServiceAccountKeyPathParameters,
|
||||
ServiceaccounttypesPostableFactorAPIKeyDTO,
|
||||
ServiceaccounttypesPostableServiceAccountDTO,
|
||||
ServiceaccounttypesUpdatableFactorAPIKeyDTO,
|
||||
ServiceaccounttypesUpdatableServiceAccountDTO,
|
||||
ServiceaccounttypesUpdatableServiceAccountStatusDTO,
|
||||
UpdateServiceAccountKeyPathParameters,
|
||||
UpdateServiceAccountPathParameters,
|
||||
UpdateServiceAccountStatusPathParameters,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
|
||||
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
|
||||
|
||||
/**
|
||||
* This endpoint lists the service accounts for an organisation
|
||||
* @summary List service accounts
|
||||
*/
|
||||
export const listServiceAccounts = (signal?: AbortSignal) => {
|
||||
return GeneratedAPIInstance<ListServiceAccounts200>({
|
||||
url: `/api/v1/service_accounts`,
|
||||
method: 'GET',
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getListServiceAccountsQueryKey = () => {
|
||||
return [`/api/v1/service_accounts`] as const;
|
||||
};
|
||||
|
||||
export const getListServiceAccountsQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof listServiceAccounts>>,
|
||||
TError = ErrorType<RenderErrorResponseDTO>
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof listServiceAccounts>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
}) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey = queryOptions?.queryKey ?? getListServiceAccountsQueryKey();
|
||||
|
||||
const queryFn: QueryFunction<
|
||||
Awaited<ReturnType<typeof listServiceAccounts>>
|
||||
> = ({ signal }) => listServiceAccounts(signal);
|
||||
|
||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof listServiceAccounts>>,
|
||||
TError,
|
||||
TData
|
||||
> & { queryKey: QueryKey };
|
||||
};
|
||||
|
||||
export type ListServiceAccountsQueryResult = NonNullable<
|
||||
Awaited<ReturnType<typeof listServiceAccounts>>
|
||||
>;
|
||||
export type ListServiceAccountsQueryError = ErrorType<RenderErrorResponseDTO>;
|
||||
|
||||
/**
|
||||
* @summary List service accounts
|
||||
*/
|
||||
|
||||
export function useListServiceAccounts<
|
||||
TData = Awaited<ReturnType<typeof listServiceAccounts>>,
|
||||
TError = ErrorType<RenderErrorResponseDTO>
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof listServiceAccounts>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getListServiceAccountsQueryOptions(options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary List service accounts
|
||||
*/
|
||||
export const invalidateListServiceAccounts = async (
|
||||
queryClient: QueryClient,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getListServiceAccountsQueryKey() },
|
||||
options,
|
||||
);
|
||||
|
||||
return queryClient;
|
||||
};
|
||||
|
||||
/**
|
||||
* This endpoint creates a service account
|
||||
* @summary Create service account
|
||||
*/
|
||||
export const createServiceAccount = (
|
||||
serviceaccounttypesPostableServiceAccountDTO: BodyType<ServiceaccounttypesPostableServiceAccountDTO>,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<CreateServiceAccount201>({
|
||||
url: `/api/v1/service_accounts`,
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: serviceaccounttypesPostableServiceAccountDTO,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getCreateServiceAccountMutationOptions = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof createServiceAccount>>,
|
||||
TError,
|
||||
{ data: BodyType<ServiceaccounttypesPostableServiceAccountDTO> },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof createServiceAccount>>,
|
||||
TError,
|
||||
{ data: BodyType<ServiceaccounttypesPostableServiceAccountDTO> },
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['createServiceAccount'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof createServiceAccount>>,
|
||||
{ data: BodyType<ServiceaccounttypesPostableServiceAccountDTO> }
|
||||
> = (props) => {
|
||||
const { data } = props ?? {};
|
||||
|
||||
return createServiceAccount(data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type CreateServiceAccountMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof createServiceAccount>>
|
||||
>;
|
||||
export type CreateServiceAccountMutationBody = BodyType<ServiceaccounttypesPostableServiceAccountDTO>;
|
||||
export type CreateServiceAccountMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||
|
||||
/**
|
||||
* @summary Create service account
|
||||
*/
|
||||
export const useCreateServiceAccount = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof createServiceAccount>>,
|
||||
TError,
|
||||
{ data: BodyType<ServiceaccounttypesPostableServiceAccountDTO> },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof createServiceAccount>>,
|
||||
TError,
|
||||
{ data: BodyType<ServiceaccounttypesPostableServiceAccountDTO> },
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getCreateServiceAccountMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* This endpoint deletes an existing service account
|
||||
* @summary Deletes a service account
|
||||
*/
|
||||
export const deleteServiceAccount = ({
|
||||
id,
|
||||
}: DeleteServiceAccountPathParameters) => {
|
||||
return GeneratedAPIInstance<string>({
|
||||
url: `/api/v1/service_accounts/${id}`,
|
||||
method: 'DELETE',
|
||||
});
|
||||
};
|
||||
|
||||
export const getDeleteServiceAccountMutationOptions = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof deleteServiceAccount>>,
|
||||
TError,
|
||||
{ pathParams: DeleteServiceAccountPathParameters },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof deleteServiceAccount>>,
|
||||
TError,
|
||||
{ pathParams: DeleteServiceAccountPathParameters },
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['deleteServiceAccount'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof deleteServiceAccount>>,
|
||||
{ pathParams: DeleteServiceAccountPathParameters }
|
||||
> = (props) => {
|
||||
const { pathParams } = props ?? {};
|
||||
|
||||
return deleteServiceAccount(pathParams);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type DeleteServiceAccountMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof deleteServiceAccount>>
|
||||
>;
|
||||
|
||||
export type DeleteServiceAccountMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||
|
||||
/**
|
||||
* @summary Deletes a service account
|
||||
*/
|
||||
export const useDeleteServiceAccount = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof deleteServiceAccount>>,
|
||||
TError,
|
||||
{ pathParams: DeleteServiceAccountPathParameters },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof deleteServiceAccount>>,
|
||||
TError,
|
||||
{ pathParams: DeleteServiceAccountPathParameters },
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getDeleteServiceAccountMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* This endpoint gets an existing service account
|
||||
* @summary Gets a service account
|
||||
*/
|
||||
export const getServiceAccount = (
|
||||
{ id }: GetServiceAccountPathParameters,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<GetServiceAccount200>({
|
||||
url: `/api/v1/service_accounts/${id}`,
|
||||
method: 'GET',
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getGetServiceAccountQueryKey = ({
|
||||
id,
|
||||
}: GetServiceAccountPathParameters) => {
|
||||
return [`/api/v1/service_accounts/${id}`] as const;
|
||||
};
|
||||
|
||||
export const getGetServiceAccountQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof getServiceAccount>>,
|
||||
TError = ErrorType<RenderErrorResponseDTO>
|
||||
>(
|
||||
{ id }: GetServiceAccountPathParameters,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getServiceAccount>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey =
|
||||
queryOptions?.queryKey ?? getGetServiceAccountQueryKey({ id });
|
||||
|
||||
const queryFn: QueryFunction<
|
||||
Awaited<ReturnType<typeof getServiceAccount>>
|
||||
> = ({ signal }) => getServiceAccount({ id }, signal);
|
||||
|
||||
return {
|
||||
queryKey,
|
||||
queryFn,
|
||||
enabled: !!id,
|
||||
...queryOptions,
|
||||
} as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getServiceAccount>>,
|
||||
TError,
|
||||
TData
|
||||
> & { queryKey: QueryKey };
|
||||
};
|
||||
|
||||
export type GetServiceAccountQueryResult = NonNullable<
|
||||
Awaited<ReturnType<typeof getServiceAccount>>
|
||||
>;
|
||||
export type GetServiceAccountQueryError = ErrorType<RenderErrorResponseDTO>;
|
||||
|
||||
/**
|
||||
* @summary Gets a service account
|
||||
*/
|
||||
|
||||
export function useGetServiceAccount<
|
||||
TData = Awaited<ReturnType<typeof getServiceAccount>>,
|
||||
TError = ErrorType<RenderErrorResponseDTO>
|
||||
>(
|
||||
{ id }: GetServiceAccountPathParameters,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getServiceAccount>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getGetServiceAccountQueryOptions({ id }, options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Gets a service account
|
||||
*/
|
||||
export const invalidateGetServiceAccount = async (
|
||||
queryClient: QueryClient,
|
||||
{ id }: GetServiceAccountPathParameters,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getGetServiceAccountQueryKey({ id }) },
|
||||
options,
|
||||
);
|
||||
|
||||
return queryClient;
|
||||
};
|
||||
|
||||
/**
|
||||
* This endpoint updates an existing service account
|
||||
* @summary Updates a service account
|
||||
*/
|
||||
export const updateServiceAccount = (
|
||||
{ id }: UpdateServiceAccountPathParameters,
|
||||
serviceaccounttypesUpdatableServiceAccountDTO: BodyType<ServiceaccounttypesUpdatableServiceAccountDTO>,
|
||||
) => {
|
||||
return GeneratedAPIInstance<string>({
|
||||
url: `/api/v1/service_accounts/${id}`,
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: serviceaccounttypesUpdatableServiceAccountDTO,
|
||||
});
|
||||
};
|
||||
|
||||
export const getUpdateServiceAccountMutationOptions = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof updateServiceAccount>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: UpdateServiceAccountPathParameters;
|
||||
data: BodyType<ServiceaccounttypesUpdatableServiceAccountDTO>;
|
||||
},
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof updateServiceAccount>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: UpdateServiceAccountPathParameters;
|
||||
data: BodyType<ServiceaccounttypesUpdatableServiceAccountDTO>;
|
||||
},
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['updateServiceAccount'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof updateServiceAccount>>,
|
||||
{
|
||||
pathParams: UpdateServiceAccountPathParameters;
|
||||
data: BodyType<ServiceaccounttypesUpdatableServiceAccountDTO>;
|
||||
}
|
||||
> = (props) => {
|
||||
const { pathParams, data } = props ?? {};
|
||||
|
||||
return updateServiceAccount(pathParams, data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type UpdateServiceAccountMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof updateServiceAccount>>
|
||||
>;
|
||||
export type UpdateServiceAccountMutationBody = BodyType<ServiceaccounttypesUpdatableServiceAccountDTO>;
|
||||
export type UpdateServiceAccountMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||
|
||||
/**
|
||||
* @summary Updates a service account
|
||||
*/
|
||||
export const useUpdateServiceAccount = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof updateServiceAccount>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: UpdateServiceAccountPathParameters;
|
||||
data: BodyType<ServiceaccounttypesUpdatableServiceAccountDTO>;
|
||||
},
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof updateServiceAccount>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: UpdateServiceAccountPathParameters;
|
||||
data: BodyType<ServiceaccounttypesUpdatableServiceAccountDTO>;
|
||||
},
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getUpdateServiceAccountMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* This endpoint lists the service account keys
|
||||
* @summary List service account keys
|
||||
*/
|
||||
export const listServiceAccountKeys = (
|
||||
{ id }: ListServiceAccountKeysPathParameters,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<ListServiceAccountKeys200>({
|
||||
url: `/api/v1/service_accounts/${id}/keys`,
|
||||
method: 'GET',
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getListServiceAccountKeysQueryKey = ({
|
||||
id,
|
||||
}: ListServiceAccountKeysPathParameters) => {
|
||||
return [`/api/v1/service_accounts/${id}/keys`] as const;
|
||||
};
|
||||
|
||||
export const getListServiceAccountKeysQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof listServiceAccountKeys>>,
|
||||
TError = ErrorType<RenderErrorResponseDTO>
|
||||
>(
|
||||
{ id }: ListServiceAccountKeysPathParameters,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof listServiceAccountKeys>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey =
|
||||
queryOptions?.queryKey ?? getListServiceAccountKeysQueryKey({ id });
|
||||
|
||||
const queryFn: QueryFunction<
|
||||
Awaited<ReturnType<typeof listServiceAccountKeys>>
|
||||
> = ({ signal }) => listServiceAccountKeys({ id }, signal);
|
||||
|
||||
return {
|
||||
queryKey,
|
||||
queryFn,
|
||||
enabled: !!id,
|
||||
...queryOptions,
|
||||
} as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof listServiceAccountKeys>>,
|
||||
TError,
|
||||
TData
|
||||
> & { queryKey: QueryKey };
|
||||
};
|
||||
|
||||
export type ListServiceAccountKeysQueryResult = NonNullable<
|
||||
Awaited<ReturnType<typeof listServiceAccountKeys>>
|
||||
>;
|
||||
export type ListServiceAccountKeysQueryError = ErrorType<RenderErrorResponseDTO>;
|
||||
|
||||
/**
|
||||
* @summary List service account keys
|
||||
*/
|
||||
|
||||
export function useListServiceAccountKeys<
|
||||
TData = Awaited<ReturnType<typeof listServiceAccountKeys>>,
|
||||
TError = ErrorType<RenderErrorResponseDTO>
|
||||
>(
|
||||
{ id }: ListServiceAccountKeysPathParameters,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof listServiceAccountKeys>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getListServiceAccountKeysQueryOptions({ id }, options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary List service account keys
|
||||
*/
|
||||
export const invalidateListServiceAccountKeys = async (
|
||||
queryClient: QueryClient,
|
||||
{ id }: ListServiceAccountKeysPathParameters,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getListServiceAccountKeysQueryKey({ id }) },
|
||||
options,
|
||||
);
|
||||
|
||||
return queryClient;
|
||||
};
|
||||
|
||||
/**
|
||||
* This endpoint creates a service account key
|
||||
* @summary Create a service account key
|
||||
*/
|
||||
export const createServiceAccountKey = (
|
||||
{ id }: CreateServiceAccountKeyPathParameters,
|
||||
serviceaccounttypesPostableFactorAPIKeyDTO: BodyType<ServiceaccounttypesPostableFactorAPIKeyDTO>,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<CreateServiceAccountKey201>({
|
||||
url: `/api/v1/service_accounts/${id}/keys`,
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: serviceaccounttypesPostableFactorAPIKeyDTO,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getCreateServiceAccountKeyMutationOptions = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof createServiceAccountKey>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: CreateServiceAccountKeyPathParameters;
|
||||
data: BodyType<ServiceaccounttypesPostableFactorAPIKeyDTO>;
|
||||
},
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof createServiceAccountKey>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: CreateServiceAccountKeyPathParameters;
|
||||
data: BodyType<ServiceaccounttypesPostableFactorAPIKeyDTO>;
|
||||
},
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['createServiceAccountKey'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof createServiceAccountKey>>,
|
||||
{
|
||||
pathParams: CreateServiceAccountKeyPathParameters;
|
||||
data: BodyType<ServiceaccounttypesPostableFactorAPIKeyDTO>;
|
||||
}
|
||||
> = (props) => {
|
||||
const { pathParams, data } = props ?? {};
|
||||
|
||||
return createServiceAccountKey(pathParams, data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type CreateServiceAccountKeyMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof createServiceAccountKey>>
|
||||
>;
|
||||
export type CreateServiceAccountKeyMutationBody = BodyType<ServiceaccounttypesPostableFactorAPIKeyDTO>;
|
||||
export type CreateServiceAccountKeyMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||
|
||||
/**
|
||||
* @summary Create a service account key
|
||||
*/
|
||||
export const useCreateServiceAccountKey = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof createServiceAccountKey>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: CreateServiceAccountKeyPathParameters;
|
||||
data: BodyType<ServiceaccounttypesPostableFactorAPIKeyDTO>;
|
||||
},
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof createServiceAccountKey>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: CreateServiceAccountKeyPathParameters;
|
||||
data: BodyType<ServiceaccounttypesPostableFactorAPIKeyDTO>;
|
||||
},
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getCreateServiceAccountKeyMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* This endpoint revokes an existing service account key
|
||||
* @summary Revoke a service account key
|
||||
*/
|
||||
export const revokeServiceAccountKey = ({
|
||||
id,
|
||||
fid,
|
||||
}: RevokeServiceAccountKeyPathParameters) => {
|
||||
return GeneratedAPIInstance<string>({
|
||||
url: `/api/v1/service_accounts/${id}/keys/${fid}`,
|
||||
method: 'DELETE',
|
||||
});
|
||||
};
|
||||
|
||||
export const getRevokeServiceAccountKeyMutationOptions = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof revokeServiceAccountKey>>,
|
||||
TError,
|
||||
{ pathParams: RevokeServiceAccountKeyPathParameters },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof revokeServiceAccountKey>>,
|
||||
TError,
|
||||
{ pathParams: RevokeServiceAccountKeyPathParameters },
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['revokeServiceAccountKey'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof revokeServiceAccountKey>>,
|
||||
{ pathParams: RevokeServiceAccountKeyPathParameters }
|
||||
> = (props) => {
|
||||
const { pathParams } = props ?? {};
|
||||
|
||||
return revokeServiceAccountKey(pathParams);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type RevokeServiceAccountKeyMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof revokeServiceAccountKey>>
|
||||
>;
|
||||
|
||||
export type RevokeServiceAccountKeyMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||
|
||||
/**
|
||||
* @summary Revoke a service account key
|
||||
*/
|
||||
export const useRevokeServiceAccountKey = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof revokeServiceAccountKey>>,
|
||||
TError,
|
||||
{ pathParams: RevokeServiceAccountKeyPathParameters },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof revokeServiceAccountKey>>,
|
||||
TError,
|
||||
{ pathParams: RevokeServiceAccountKeyPathParameters },
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getRevokeServiceAccountKeyMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* This endpoint updates an existing service account key
|
||||
* @summary Updates a service account key
|
||||
*/
|
||||
export const updateServiceAccountKey = (
|
||||
{ id, fid }: UpdateServiceAccountKeyPathParameters,
|
||||
serviceaccounttypesUpdatableFactorAPIKeyDTO: BodyType<ServiceaccounttypesUpdatableFactorAPIKeyDTO>,
|
||||
) => {
|
||||
return GeneratedAPIInstance<string>({
|
||||
url: `/api/v1/service_accounts/${id}/keys/${fid}`,
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: serviceaccounttypesUpdatableFactorAPIKeyDTO,
|
||||
});
|
||||
};
|
||||
|
||||
export const getUpdateServiceAccountKeyMutationOptions = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof updateServiceAccountKey>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: UpdateServiceAccountKeyPathParameters;
|
||||
data: BodyType<ServiceaccounttypesUpdatableFactorAPIKeyDTO>;
|
||||
},
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof updateServiceAccountKey>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: UpdateServiceAccountKeyPathParameters;
|
||||
data: BodyType<ServiceaccounttypesUpdatableFactorAPIKeyDTO>;
|
||||
},
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['updateServiceAccountKey'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof updateServiceAccountKey>>,
|
||||
{
|
||||
pathParams: UpdateServiceAccountKeyPathParameters;
|
||||
data: BodyType<ServiceaccounttypesUpdatableFactorAPIKeyDTO>;
|
||||
}
|
||||
> = (props) => {
|
||||
const { pathParams, data } = props ?? {};
|
||||
|
||||
return updateServiceAccountKey(pathParams, data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type UpdateServiceAccountKeyMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof updateServiceAccountKey>>
|
||||
>;
|
||||
export type UpdateServiceAccountKeyMutationBody = BodyType<ServiceaccounttypesUpdatableFactorAPIKeyDTO>;
|
||||
export type UpdateServiceAccountKeyMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||
|
||||
/**
|
||||
* @summary Updates a service account key
|
||||
*/
|
||||
export const useUpdateServiceAccountKey = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof updateServiceAccountKey>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: UpdateServiceAccountKeyPathParameters;
|
||||
data: BodyType<ServiceaccounttypesUpdatableFactorAPIKeyDTO>;
|
||||
},
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof updateServiceAccountKey>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: UpdateServiceAccountKeyPathParameters;
|
||||
data: BodyType<ServiceaccounttypesUpdatableFactorAPIKeyDTO>;
|
||||
},
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getUpdateServiceAccountKeyMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* This endpoint updates an existing service account status
|
||||
* @summary Updates a service account status
|
||||
*/
|
||||
export const updateServiceAccountStatus = (
|
||||
{ id }: UpdateServiceAccountStatusPathParameters,
|
||||
serviceaccounttypesUpdatableServiceAccountStatusDTO: BodyType<ServiceaccounttypesUpdatableServiceAccountStatusDTO>,
|
||||
) => {
|
||||
return GeneratedAPIInstance<string>({
|
||||
url: `/api/v1/service_accounts/${id}/status`,
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: serviceaccounttypesUpdatableServiceAccountStatusDTO,
|
||||
});
|
||||
};
|
||||
|
||||
export const getUpdateServiceAccountStatusMutationOptions = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof updateServiceAccountStatus>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: UpdateServiceAccountStatusPathParameters;
|
||||
data: BodyType<ServiceaccounttypesUpdatableServiceAccountStatusDTO>;
|
||||
},
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof updateServiceAccountStatus>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: UpdateServiceAccountStatusPathParameters;
|
||||
data: BodyType<ServiceaccounttypesUpdatableServiceAccountStatusDTO>;
|
||||
},
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['updateServiceAccountStatus'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof updateServiceAccountStatus>>,
|
||||
{
|
||||
pathParams: UpdateServiceAccountStatusPathParameters;
|
||||
data: BodyType<ServiceaccounttypesUpdatableServiceAccountStatusDTO>;
|
||||
}
|
||||
> = (props) => {
|
||||
const { pathParams, data } = props ?? {};
|
||||
|
||||
return updateServiceAccountStatus(pathParams, data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type UpdateServiceAccountStatusMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof updateServiceAccountStatus>>
|
||||
>;
|
||||
export type UpdateServiceAccountStatusMutationBody = BodyType<ServiceaccounttypesUpdatableServiceAccountStatusDTO>;
|
||||
export type UpdateServiceAccountStatusMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||
|
||||
/**
|
||||
* @summary Updates a service account status
|
||||
*/
|
||||
export const useUpdateServiceAccountStatus = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof updateServiceAccountStatus>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: UpdateServiceAccountStatusPathParameters;
|
||||
data: BodyType<ServiceaccounttypesUpdatableServiceAccountStatusDTO>;
|
||||
},
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof updateServiceAccountStatus>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: UpdateServiceAccountStatusPathParameters;
|
||||
data: BodyType<ServiceaccounttypesUpdatableServiceAccountStatusDTO>;
|
||||
},
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getUpdateServiceAccountStatusMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
@@ -2090,6 +2090,154 @@ export interface RoletypesRoleDTO {
|
||||
updatedAt?: Date;
|
||||
}
|
||||
|
||||
export interface ServiceaccounttypesFactorAPIKeyDTO {
|
||||
/**
|
||||
* @type string
|
||||
* @format date-time
|
||||
*/
|
||||
createdAt?: Date;
|
||||
/**
|
||||
* @type integer
|
||||
* @minimum 0
|
||||
*/
|
||||
expires_at: number;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
id: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
key: string;
|
||||
/**
|
||||
* @type string
|
||||
* @format date-time
|
||||
*/
|
||||
last_used: Date;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name?: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
service_account_id: string;
|
||||
/**
|
||||
* @type string
|
||||
* @format date-time
|
||||
*/
|
||||
updatedAt?: Date;
|
||||
}
|
||||
|
||||
export interface ServiceaccounttypesGettableFactorAPIKeyWithKeyDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
id: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
key: string;
|
||||
}
|
||||
|
||||
export interface ServiceaccounttypesPostableFactorAPIKeyDTO {
|
||||
/**
|
||||
* @type integer
|
||||
* @minimum 0
|
||||
*/
|
||||
expires_at: number;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface ServiceaccounttypesPostableServiceAccountDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
email: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* @type array
|
||||
*/
|
||||
roles: string[];
|
||||
}
|
||||
|
||||
export interface ServiceaccounttypesServiceAccountDTO {
|
||||
/**
|
||||
* @type string
|
||||
* @format date-time
|
||||
*/
|
||||
createdAt?: Date;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
email: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
id: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
orgID: string;
|
||||
/**
|
||||
* @type array
|
||||
*/
|
||||
roles: string[];
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status: string;
|
||||
/**
|
||||
* @type string
|
||||
* @format date-time
|
||||
*/
|
||||
updatedAt?: Date;
|
||||
}
|
||||
|
||||
export interface ServiceaccounttypesUpdatableFactorAPIKeyDTO {
|
||||
/**
|
||||
* @type integer
|
||||
* @minimum 0
|
||||
*/
|
||||
expires_at: number;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface ServiceaccounttypesUpdatableServiceAccountDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
email: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* @type array
|
||||
*/
|
||||
roles: string[];
|
||||
}
|
||||
|
||||
export interface ServiceaccounttypesUpdatableServiceAccountStatusDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status: string;
|
||||
}
|
||||
|
||||
export enum TelemetrytypesFieldContextDTO {
|
||||
metric = 'metric',
|
||||
log = 'log',
|
||||
@@ -2732,76 +2880,6 @@ export type DeleteAuthDomainPathParameters = {
|
||||
export type UpdateAuthDomainPathParameters = {
|
||||
id: string;
|
||||
};
|
||||
export type HandleExportRawDataGETParams = {
|
||||
/**
|
||||
* @enum csv,jsonl
|
||||
* @type string
|
||||
* @description undefined
|
||||
*/
|
||||
format?: HandleExportRawDataGETFormat;
|
||||
/**
|
||||
* @enum logs,traces
|
||||
* @type string
|
||||
* @description undefined
|
||||
*/
|
||||
source?: HandleExportRawDataGETSource;
|
||||
/**
|
||||
* @type integer
|
||||
* @minimum 0
|
||||
* @description undefined
|
||||
*/
|
||||
start?: number;
|
||||
/**
|
||||
* @type integer
|
||||
* @minimum 0
|
||||
* @description undefined
|
||||
*/
|
||||
end?: number;
|
||||
/**
|
||||
* @type integer
|
||||
* @maximum 50000
|
||||
* @minimum 1
|
||||
* @description undefined
|
||||
*/
|
||||
limit?: number;
|
||||
/**
|
||||
* @type string
|
||||
* @description undefined
|
||||
*/
|
||||
filter?: string;
|
||||
/**
|
||||
* @type array
|
||||
* @description undefined
|
||||
*/
|
||||
columns?: string[];
|
||||
/**
|
||||
* @type string
|
||||
* @description undefined
|
||||
*/
|
||||
order_by?: string;
|
||||
};
|
||||
|
||||
export enum HandleExportRawDataGETFormat {
|
||||
csv = 'csv',
|
||||
jsonl = 'jsonl',
|
||||
}
|
||||
export enum HandleExportRawDataGETSource {
|
||||
logs = 'logs',
|
||||
traces = 'traces',
|
||||
}
|
||||
export type HandleExportRawDataPOSTParams = {
|
||||
/**
|
||||
* @enum csv,jsonl
|
||||
* @type string
|
||||
* @description undefined
|
||||
*/
|
||||
format?: HandleExportRawDataPOSTFormat;
|
||||
};
|
||||
|
||||
export enum HandleExportRawDataPOSTFormat {
|
||||
csv = 'csv',
|
||||
jsonl = 'jsonl',
|
||||
}
|
||||
export type GetFieldsKeysParams = {
|
||||
/**
|
||||
* @description undefined
|
||||
@@ -3120,6 +3198,78 @@ export type PatchObjectsPathParameters = {
|
||||
id: string;
|
||||
relation: string;
|
||||
};
|
||||
export type ListServiceAccounts200 = {
|
||||
/**
|
||||
* @type array
|
||||
*/
|
||||
data: ServiceaccounttypesServiceAccountDTO[];
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status: string;
|
||||
};
|
||||
|
||||
export type CreateServiceAccount201 = {
|
||||
data: TypesIdentifiableDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status: string;
|
||||
};
|
||||
|
||||
export type DeleteServiceAccountPathParameters = {
|
||||
id: string;
|
||||
};
|
||||
export type GetServiceAccountPathParameters = {
|
||||
id: string;
|
||||
};
|
||||
export type GetServiceAccount200 = {
|
||||
data: ServiceaccounttypesServiceAccountDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status: string;
|
||||
};
|
||||
|
||||
export type UpdateServiceAccountPathParameters = {
|
||||
id: string;
|
||||
};
|
||||
export type ListServiceAccountKeysPathParameters = {
|
||||
id: string;
|
||||
};
|
||||
export type ListServiceAccountKeys200 = {
|
||||
/**
|
||||
* @type array
|
||||
*/
|
||||
data: ServiceaccounttypesFactorAPIKeyDTO[];
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status: string;
|
||||
};
|
||||
|
||||
export type CreateServiceAccountKeyPathParameters = {
|
||||
id: string;
|
||||
};
|
||||
export type CreateServiceAccountKey201 = {
|
||||
data: ServiceaccounttypesGettableFactorAPIKeyWithKeyDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status: string;
|
||||
};
|
||||
|
||||
export type RevokeServiceAccountKeyPathParameters = {
|
||||
id: string;
|
||||
fid: string;
|
||||
};
|
||||
export type UpdateServiceAccountKeyPathParameters = {
|
||||
id: string;
|
||||
fid: string;
|
||||
};
|
||||
export type UpdateServiceAccountStatusPathParameters = {
|
||||
id: string;
|
||||
};
|
||||
export type ListUsers200 = {
|
||||
/**
|
||||
* @type array
|
||||
|
||||
frontend/src/components/GuardAuthZ/GuardAuthZ.test.tsx (new file, 321 lines)
@@ -0,0 +1,321 @@
|
||||
import { ReactElement } from 'react';
|
||||
import {
|
||||
AuthtypesGettableTransactionDTO,
|
||||
AuthtypesTransactionDTO,
|
||||
} from 'api/generated/services/sigNoz.schemas';
|
||||
import { ENVIRONMENT } from 'constants/env';
|
||||
import { BrandedPermission } from 'hooks/useAuthZ/types';
|
||||
import { buildPermission } from 'hooks/useAuthZ/utils';
|
||||
import { server } from 'mocks-server/server';
|
||||
import { rest } from 'msw';
|
||||
import { render, screen, waitFor } from 'tests/test-utils';
|
||||
|
||||
import { GuardAuthZ } from './GuardAuthZ';
|
||||
|
||||
const BASE_URL = ENVIRONMENT.baseURL || '';
|
||||
const AUTHZ_CHECK_URL = `${BASE_URL}/api/v1/authz/check`;
|
||||
|
||||
function authzMockResponse(
|
||||
payload: AuthtypesTransactionDTO[],
|
||||
authorizedByIndex: boolean[],
|
||||
): { data: AuthtypesGettableTransactionDTO[]; status: string } {
|
||||
return {
|
||||
data: payload.map((txn, i) => ({
|
||||
relation: txn.relation,
|
||||
object: txn.object,
|
||||
authorized: authorizedByIndex[i] ?? false,
|
||||
})),
|
||||
status: 'success',
|
||||
};
|
||||
}
|
||||
|
||||
describe('GuardAuthZ', () => {
|
||||
const TestChild = (): ReactElement => <div>Protected Content</div>;
|
||||
const LoadingFallback = (): ReactElement => <div>Loading...</div>;
|
||||
const ErrorFallback = (error: Error): ReactElement => (
|
||||
<div>Error occurred: {error.message}</div>
|
||||
);
|
||||
const NoPermissionFallback = (_response: {
|
||||
requiredPermissionName: BrandedPermission;
|
||||
}): ReactElement => <div>Access denied</div>;
|
||||
const NoPermissionFallbackWithSuggestions = (response: {
|
||||
requiredPermissionName: BrandedPermission;
|
||||
}): ReactElement => (
|
||||
<div>
|
||||
Access denied. Required permission: {response.requiredPermissionName}
|
||||
</div>
|
||||
);
|
||||
|
||||
it('should render children when permission is granted', async () => {
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
const payload = await req.json();
|
||||
return res(ctx.status(200), ctx.json(authzMockResponse(payload, [true])));
|
||||
}),
|
||||
);
|
||||
|
||||
render(
|
||||
<GuardAuthZ relation="read" object="dashboard:*">
|
||||
<TestChild />
|
||||
</GuardAuthZ>,
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Protected Content')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should render fallbackOnLoading when loading', () => {
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (_req, res, ctx) => {
|
||||
return res(
|
||||
ctx.delay('infinite'),
|
||||
ctx.status(200),
|
||||
ctx.json({ data: [], status: 'success' }),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
render(
|
||||
<GuardAuthZ
|
||||
relation="read"
|
||||
object="dashboard:*"
|
||||
fallbackOnLoading={<LoadingFallback />}
|
||||
>
|
||||
<TestChild />
|
||||
</GuardAuthZ>,
|
||||
);
|
||||
|
||||
expect(screen.getByText('Loading...')).toBeInTheDocument();
|
||||
expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render null when loading and no fallbackOnLoading provided', () => {
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (_req, res, ctx) => {
|
||||
return res(
|
||||
ctx.delay('infinite'),
|
||||
ctx.status(200),
|
||||
ctx.json({ data: [], status: 'success' }),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const { container } = render(
|
||||
<GuardAuthZ relation="read" object="dashboard:*">
|
||||
<TestChild />
|
||||
</GuardAuthZ>,
|
||||
);
|
||||
|
||||
expect(container.firstChild).toBeNull();
|
||||
expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render fallbackOnError when API error occurs', async () => {
|
||||
const errorMessage = 'Internal Server Error';
|
||||
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, (_req, res, ctx) => {
|
||||
return res(ctx.status(500), ctx.json({ error: errorMessage }));
|
||||
}),
|
||||
);
|
||||
|
||||
render(
|
||||
<GuardAuthZ
|
||||
relation="read"
|
||||
object="dashboard:*"
|
||||
fallbackOnError={ErrorFallback}
|
||||
>
|
||||
<TestChild />
|
||||
</GuardAuthZ>,
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/Error occurred:/)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should pass error object to fallbackOnError function', async () => {
|
||||
const errorMessage = 'Network request failed';
|
||||
let receivedError: Error | null = null;
|
||||
|
||||
const errorFallbackWithCapture = (error: Error): ReactElement => {
|
||||
receivedError = error;
|
||||
return <div>Captured error: {error.message}</div>;
|
||||
};
|
||||
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, (_req, res, ctx) => {
|
||||
return res(ctx.status(500), ctx.json({ error: errorMessage }));
|
||||
}),
|
||||
);
|
||||
|
||||
render(
|
||||
<GuardAuthZ
|
||||
relation="read"
|
||||
object="dashboard:*"
|
||||
fallbackOnError={errorFallbackWithCapture}
|
||||
>
|
||||
<TestChild />
|
||||
</GuardAuthZ>,
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(receivedError).not.toBeNull();
|
||||
});
|
||||
|
||||
expect(receivedError).toBeInstanceOf(Error);
|
||||
expect(screen.getByText(/Captured error:/)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render null when error occurs and no fallbackOnError provided', async () => {
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, (_req, res, ctx) => {
|
||||
return res(ctx.status(500), ctx.json({ error: 'Internal Server Error' }));
|
||||
}),
|
||||
);
|
||||
|
||||
const { container } = render(
|
||||
<GuardAuthZ relation="read" object="dashboard:*">
|
||||
<TestChild />
|
||||
</GuardAuthZ>,
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(container.firstChild).toBeNull();
|
||||
});
|
||||
|
||||
expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render fallbackOnNoPermissions when permission is denied', async () => {
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
const payload = await req.json();
|
||||
return res(ctx.status(200), ctx.json(authzMockResponse(payload, [false])));
|
||||
}),
|
||||
);
|
||||
|
||||
render(
|
||||
<GuardAuthZ
|
||||
relation="update"
|
||||
object="dashboard:123"
|
||||
fallbackOnNoPermissions={NoPermissionFallback}
|
||||
>
|
||||
<TestChild />
|
||||
</GuardAuthZ>,
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Access denied')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render null when permission is denied and no fallbackOnNoPermissions provided', async () => {
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
const payload = await req.json();
|
||||
return res(ctx.status(200), ctx.json(authzMockResponse(payload, [false])));
|
||||
}),
|
||||
);
|
||||
|
||||
const { container } = render(
|
||||
<GuardAuthZ relation="update" object="dashboard:123">
|
||||
<TestChild />
|
||||
</GuardAuthZ>,
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(container.firstChild).toBeNull();
|
||||
});
|
||||
|
||||
expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render null when permissions object is null', async () => {
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, (_req, res, ctx) => {
|
||||
return res(ctx.status(200), ctx.json({ data: [], status: 'success' }));
|
||||
}),
|
||||
);
|
||||
|
||||
const { container } = render(
|
||||
<GuardAuthZ relation="read" object="dashboard:*">
|
||||
<TestChild />
|
||||
</GuardAuthZ>,
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(container.firstChild).toBeNull();
|
||||
});
|
||||
|
||||
expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should pass requiredPermissionName to fallbackOnNoPermissions', async () => {
|
||||
const permission = buildPermission('update', 'dashboard:123');
|
||||
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
const payload = await req.json();
|
||||
return res(ctx.status(200), ctx.json(authzMockResponse(payload, [false])));
|
||||
}),
|
||||
);
|
||||
|
||||
render(
|
||||
<GuardAuthZ
|
||||
relation="update"
|
||||
object="dashboard:123"
|
||||
fallbackOnNoPermissions={NoPermissionFallbackWithSuggestions}
|
||||
>
|
||||
<TestChild />
|
||||
</GuardAuthZ>,
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(
|
||||
screen.getByText(/Access denied. Required permission:/),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(
|
||||
screen.getAllByText(
|
||||
new RegExp(permission.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')),
|
||||
).length,
|
||||
).toBeGreaterThan(0);
|
||||
expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should handle different relation and object combinations', async () => {
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
const payload = await req.json();
|
||||
return res(ctx.status(200), ctx.json(authzMockResponse(payload, [true])));
|
||||
}),
|
||||
);
|
||||
|
||||
const { rerender } = render(
|
||||
<GuardAuthZ relation="read" object="dashboard:*">
|
||||
<TestChild />
|
||||
</GuardAuthZ>,
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Protected Content')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
rerender(
|
||||
<GuardAuthZ relation="delete" object="dashboard:456">
|
||||
<TestChild />
|
||||
</GuardAuthZ>,
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Protected Content')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
frontend/src/components/GuardAuthZ/GuardAuthZ.tsx (new file, 50 lines)
@@ -0,0 +1,50 @@
|
||||
import { ReactElement } from 'react';
|
||||
import {
|
||||
AuthZObject,
|
||||
AuthZRelation,
|
||||
BrandedPermission,
|
||||
} from 'hooks/useAuthZ/types';
|
||||
import { useAuthZ } from 'hooks/useAuthZ/useAuthZ';
|
||||
import { buildPermission } from 'hooks/useAuthZ/utils';
|
||||
|
||||
export type GuardAuthZProps<R extends AuthZRelation> = {
|
||||
children: ReactElement;
|
||||
relation: R;
|
||||
object: AuthZObject<R>;
|
||||
fallbackOnLoading?: JSX.Element;
|
||||
fallbackOnError?: (error: Error) => JSX.Element;
|
||||
fallbackOnNoPermissions?: (response: {
|
||||
requiredPermissionName: BrandedPermission;
|
||||
}) => JSX.Element;
|
||||
};
|
||||
|
||||
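// Renders `children` only when the current user is authorized for `relation`
// on `object`; otherwise renders the matching fallback (or null).
// Example usage (assumed values):
//   <GuardAuthZ relation="read" object="dashboard:*">
//     <DashboardList />
//   </GuardAuthZ>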
export function GuardAuthZ<R extends AuthZRelation>({
|
||||
children,
|
||||
relation,
|
||||
object,
|
||||
fallbackOnLoading,
|
||||
fallbackOnError,
|
||||
fallbackOnNoPermissions,
|
||||
}: GuardAuthZProps<R>): JSX.Element | null {
|
||||
const permission = buildPermission<R>(relation, object);
|
||||
|
||||
const { permissions, isLoading, error } = useAuthZ([permission]);
|
||||
|
||||
if (isLoading) {
|
||||
return fallbackOnLoading ?? null;
|
||||
}
|
||||
|
||||
if (error) {
|
||||
return fallbackOnError?.(error) ?? null;
|
||||
}
|
||||
|
||||
if (!permissions?.[permission]?.isGranted) {
|
||||
return (
|
||||
fallbackOnNoPermissions?.({
|
||||
requiredPermissionName: permission,
|
||||
}) ?? null
|
||||
);
|
||||
}
|
||||
|
||||
return children;
|
||||
}
|
||||
@@ -2,13 +2,12 @@
|
||||
|
||||
import ReactMarkdown from 'react-markdown';
|
||||
import { CodeProps } from 'react-markdown/lib/ast-to-react';
|
||||
import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter';
|
||||
import { a11yDark } from 'react-syntax-highlighter/dist/cjs/styles/prism';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { isEmpty } from 'lodash-es';
|
||||
import rehypeRaw from 'rehype-raw';
|
||||
|
||||
import CodeCopyBtn from './CodeCopyBtn/CodeCopyBtn';
|
||||
import SyntaxHighlighter, { a11yDark } from './syntaxHighlighter';
|
||||
|
||||
interface LinkProps {
|
||||
href: string;
|
||||
|
||||
@@ -0,0 +1,34 @@
|
||||
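// Prism "light" build for the markdown renderer: only the grammars registered
// below (plus aliases such as js/ts/yml/dockerfile) are bundled, rather than
// every language react-syntax-highlighter ships with.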
import { PrismLight as SyntaxHighlighter } from 'react-syntax-highlighter';
|
||||
import bash from 'react-syntax-highlighter/dist/esm/languages/prism/bash';
|
||||
import docker from 'react-syntax-highlighter/dist/esm/languages/prism/docker';
|
||||
import elixir from 'react-syntax-highlighter/dist/esm/languages/prism/elixir';
|
||||
import go from 'react-syntax-highlighter/dist/esm/languages/prism/go';
|
||||
import javascript from 'react-syntax-highlighter/dist/esm/languages/prism/javascript';
|
||||
import json from 'react-syntax-highlighter/dist/esm/languages/prism/json';
|
||||
import jsx from 'react-syntax-highlighter/dist/esm/languages/prism/jsx';
|
||||
import rust from 'react-syntax-highlighter/dist/esm/languages/prism/rust';
|
||||
import swift from 'react-syntax-highlighter/dist/esm/languages/prism/swift';
|
||||
import tsx from 'react-syntax-highlighter/dist/esm/languages/prism/tsx';
|
||||
import typescript from 'react-syntax-highlighter/dist/esm/languages/prism/typescript';
|
||||
import yaml from 'react-syntax-highlighter/dist/esm/languages/prism/yaml';
|
||||
import a11yDark from 'react-syntax-highlighter/dist/esm/styles/prism/a11y-dark';
|
||||
|
||||
SyntaxHighlighter.registerLanguage('bash', bash);
|
||||
SyntaxHighlighter.registerLanguage('docker', docker);
|
||||
SyntaxHighlighter.registerLanguage('dockerfile', docker);
|
||||
SyntaxHighlighter.registerLanguage('elixir', elixir);
|
||||
SyntaxHighlighter.registerLanguage('go', go);
|
||||
SyntaxHighlighter.registerLanguage('javascript', javascript);
|
||||
SyntaxHighlighter.registerLanguage('js', javascript);
|
||||
SyntaxHighlighter.registerLanguage('json', json);
|
||||
SyntaxHighlighter.registerLanguage('jsx', jsx);
|
||||
SyntaxHighlighter.registerLanguage('rust', rust);
|
||||
SyntaxHighlighter.registerLanguage('swift', swift);
|
||||
SyntaxHighlighter.registerLanguage('ts', typescript);
|
||||
SyntaxHighlighter.registerLanguage('tsx', tsx);
|
||||
SyntaxHighlighter.registerLanguage('typescript', typescript);
|
||||
SyntaxHighlighter.registerLanguage('yaml', yaml);
|
||||
SyntaxHighlighter.registerLanguage('yml', yaml);
|
||||
|
||||
export default SyntaxHighlighter;
|
||||
export { a11yDark };
|
||||
@@ -0,0 +1,41 @@
|
||||
.guard-authz-error-no-authz {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
|
||||
padding: 24px;
|
||||
|
||||
.guard-authz-error-no-authz-content {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: flex-start;
|
||||
gap: 8px;
|
||||
max-width: 500px;
|
||||
}
|
||||
|
||||
img {
|
||||
width: 32px;
|
||||
height: 32px;
|
||||
}
|
||||
|
||||
h3 {
|
||||
font-size: 18px;
|
||||
color: var(--l1-foreground);
|
||||
line-height: 18px;
|
||||
}
|
||||
|
||||
p {
|
||||
font-size: 14px;
|
||||
color: var(--l3-foreground);
|
||||
line-height: 18px;
|
||||
|
||||
span {
|
||||
background-color: var(--l3-background);
|
||||
white-space: nowrap;
|
||||
padding: 0 2px;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,472 @@
|
||||
import { ReactElement } from 'react';
|
||||
import type { RouteComponentProps } from 'react-router-dom';
|
||||
import {
|
||||
AuthtypesGettableTransactionDTO,
|
||||
AuthtypesTransactionDTO,
|
||||
} from 'api/generated/services/sigNoz.schemas';
|
||||
import { ENVIRONMENT } from 'constants/env';
|
||||
import { server } from 'mocks-server/server';
|
||||
import { rest } from 'msw';
|
||||
import { render, screen, waitFor } from 'tests/test-utils';
|
||||
|
||||
import { createGuardedRoute } from './createGuardedRoute';
|
||||
|
||||
const BASE_URL = ENVIRONMENT.baseURL || '';
|
||||
const AUTHZ_CHECK_URL = `${BASE_URL}/api/v1/authz/check`;
|
||||
|
||||
function authzMockResponse(
|
||||
payload: AuthtypesTransactionDTO[],
|
||||
authorizedByIndex: boolean[],
|
||||
): { data: AuthtypesGettableTransactionDTO[]; status: string } {
|
||||
return {
|
||||
data: payload.map((txn, i) => ({
|
||||
relation: txn.relation,
|
||||
object: txn.object,
|
||||
authorized: authorizedByIndex[i] ?? false,
|
||||
})),
|
||||
status: 'success',
|
||||
};
|
||||
}
|
||||
|
||||
describe('createGuardedRoute', () => {
|
||||
const TestComponent = ({ testProp }: { testProp: string }): ReactElement => (
|
||||
<div>Test Component: {testProp}</div>
|
||||
);
|
||||
|
||||
it('should render component when permission is granted', async () => {
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
const payload = await req.json();
|
||||
return res(ctx.status(200), ctx.json(authzMockResponse(payload, [true])));
|
||||
}),
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
TestComponent,
|
||||
'read',
|
||||
'dashboard:*',
|
||||
);
|
||||
|
||||
const mockMatch = {
|
||||
params: {},
|
||||
isExact: true,
|
||||
path: '/dashboard',
|
||||
url: '/dashboard',
|
||||
};
|
||||
|
||||
const props = {
|
||||
testProp: 'test-value',
|
||||
match: mockMatch,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Test Component: test-value')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should substitute route parameters in object string', async () => {
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
const payload = await req.json();
|
||||
return res(ctx.status(200), ctx.json(authzMockResponse(payload, [true])));
|
||||
}),
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
TestComponent,
|
||||
'read',
|
||||
'dashboard:{id}',
|
||||
);
|
||||
|
||||
const mockMatch = {
|
||||
params: { id: '123' },
|
||||
isExact: true,
|
||||
path: '/dashboard/:id',
|
||||
url: '/dashboard/123',
|
||||
};
|
||||
|
||||
const props = {
|
||||
testProp: 'test-value',
|
||||
match: mockMatch,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Test Component: test-value')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle multiple route parameters', async () => {
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
const payload = (await req.json()) as AuthtypesTransactionDTO[];
|
||||
const txn = payload[0];
|
||||
const responseData: AuthtypesGettableTransactionDTO[] = [
|
||||
{
|
||||
relation: txn.relation,
|
||||
object: {
|
||||
resource: {
|
||||
name: txn.object.resource.name,
|
||||
type: txn.object.resource.type,
|
||||
},
|
||||
selector: '123:456',
|
||||
},
|
||||
authorized: true,
|
||||
},
|
||||
];
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({ data: responseData, status: 'success' }),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
TestComponent,
|
||||
'update',
|
||||
'dashboard:{id}:{version}',
|
||||
);
|
||||
|
||||
const mockMatch = {
|
||||
params: { id: '123', version: '456' },
|
||||
isExact: true,
|
||||
path: '/dashboard/:id/:version',
|
||||
url: '/dashboard/123/456',
|
||||
};
|
||||
|
||||
const props = {
|
||||
testProp: 'test-value',
|
||||
match: mockMatch,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Test Component: test-value')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should keep placeholder when route parameter is missing', async () => {
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
const payload = await req.json();
|
||||
return res(ctx.status(200), ctx.json(authzMockResponse(payload, [true])));
|
||||
}),
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
TestComponent,
|
||||
'read',
|
||||
'dashboard:{id}',
|
||||
);
|
||||
|
||||
const mockMatch = {
|
||||
params: {},
|
||||
isExact: true,
|
||||
path: '/dashboard',
|
||||
url: '/dashboard',
|
||||
};
|
||||
|
||||
const props = {
|
||||
testProp: 'test-value',
|
||||
match: mockMatch,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Test Component: test-value')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should render loading fallback when loading', () => {
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (_req, res, ctx) => {
|
||||
return res(
|
||||
ctx.delay('infinite'),
|
||||
ctx.status(200),
|
||||
ctx.json({ data: [], status: 'success' }),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
TestComponent,
|
||||
'read',
|
||||
'dashboard:*',
|
||||
);
|
||||
|
||||
const mockMatch = {
|
||||
params: {},
|
||||
isExact: true,
|
||||
path: '/dashboard',
|
||||
url: '/dashboard',
|
||||
};
|
||||
|
||||
const props = {
|
||||
testProp: 'test-value',
|
||||
match: mockMatch,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props} />);
|
||||
|
||||
expect(screen.getByText('SigNoz')).toBeInTheDocument();
|
||||
expect(
|
||||
screen.queryByText('Test Component: test-value'),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render error fallback when API error occurs', async () => {
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, (_req, res, ctx) => {
|
||||
return res(ctx.status(500), ctx.json({ error: 'Internal Server Error' }));
|
||||
}),
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
TestComponent,
|
||||
'read',
|
||||
'dashboard:*',
|
||||
);
|
||||
|
||||
const mockMatch = {
|
||||
params: {},
|
||||
isExact: true,
|
||||
path: '/dashboard',
|
||||
url: '/dashboard',
|
||||
};
|
||||
|
||||
const props = {
|
||||
testProp: 'test-value',
|
||||
match: mockMatch,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/Something went wrong/i)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(
|
||||
screen.queryByText('Test Component: test-value'),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render no permissions fallback when permission is denied', async () => {
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
const payload = await req.json();
|
||||
return res(ctx.status(200), ctx.json(authzMockResponse(payload, [false])));
|
||||
}),
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
TestComponent,
|
||||
'update',
|
||||
'dashboard:{id}',
|
||||
);
|
||||
|
||||
const mockMatch = {
|
||||
params: { id: '123' },
|
||||
isExact: true,
|
||||
path: '/dashboard/:id',
|
||||
url: '/dashboard/123',
|
||||
};
|
||||
|
||||
const props = {
|
||||
testProp: 'test-value',
|
||||
match: mockMatch,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props} />);
|
||||
|
||||
await waitFor(() => {
|
||||
const heading = document.querySelector('h3');
|
||||
expect(heading).toBeInTheDocument();
|
||||
expect(heading?.textContent).toMatch(/permission to view/i);
|
||||
});
|
||||
|
||||
expect(screen.getByText('update')).toBeInTheDocument();
|
||||
expect(screen.getByText('dashboard:123')).toBeInTheDocument();
|
||||
expect(
|
||||
screen.queryByText('Test Component: test-value'),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should pass all props to wrapped component', async () => {
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
const payload = await req.json();
|
||||
return res(ctx.status(200), ctx.json(authzMockResponse(payload, [true])));
|
||||
}),
|
||||
);
|
||||
|
||||
const ComponentWithMultipleProps = ({
|
||||
prop1,
|
||||
prop2,
|
||||
prop3,
|
||||
}: {
|
||||
prop1: string;
|
||||
prop2: number;
|
||||
prop3: boolean;
|
||||
}): ReactElement => (
|
||||
<div>
|
||||
{prop1} - {prop2} - {prop3.toString()}
|
||||
</div>
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
ComponentWithMultipleProps,
|
||||
'read',
|
||||
'dashboard:*',
|
||||
);
|
||||
|
||||
const mockMatch = {
|
||||
params: {},
|
||||
isExact: true,
|
||||
path: '/dashboard',
|
||||
url: '/dashboard',
|
||||
};
|
||||
|
||||
const props = {
|
||||
prop1: 'value1',
|
||||
prop2: 42,
|
||||
prop3: true,
|
||||
match: mockMatch,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('value1 - 42 - true')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should memoize resolved object based on route params', async () => {
|
||||
let requestCount = 0;
|
||||
const requestedObjects: string[] = [];
|
||||
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
requestCount++;
|
||||
const payload = (await req.json()) as AuthtypesTransactionDTO[];
|
||||
const obj = payload[0]?.object;
|
||||
const name = obj?.resource?.name;
|
||||
const selector = obj?.selector ?? '*';
|
||||
const objectStr =
|
||||
obj?.resource?.type === 'metaresources' ? name : `${name}:${selector}`;
|
||||
requestedObjects.push(objectStr ?? '');
|
||||
|
||||
return res(ctx.status(200), ctx.json(authzMockResponse(payload, [true])));
|
||||
}),
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
TestComponent,
|
||||
'read',
|
||||
'dashboard:{id}',
|
||||
);
|
||||
|
||||
const mockMatch1 = {
|
||||
params: { id: '123' },
|
||||
isExact: true,
|
||||
path: '/dashboard/:id',
|
||||
url: '/dashboard/123',
|
||||
};
|
||||
|
||||
const props1 = {
|
||||
testProp: 'test-value-1',
|
||||
match: mockMatch1,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
const { unmount } = render(<GuardedComponent {...props1} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Test Component: test-value-1')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(requestCount).toBe(1);
|
||||
expect(requestedObjects).toContain('dashboard:123');
|
||||
|
||||
unmount();
|
||||
|
||||
const mockMatch2 = {
|
||||
params: { id: '456' },
|
||||
isExact: true,
|
||||
path: '/dashboard/:id',
|
||||
url: '/dashboard/456',
|
||||
};
|
||||
|
||||
const props2 = {
|
||||
testProp: 'test-value-2',
|
||||
match: mockMatch2,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props2} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Test Component: test-value-2')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(requestCount).toBe(2);
|
||||
expect(requestedObjects).toContain('dashboard:456');
|
||||
});
|
||||
|
||||
it('should handle different relation types', async () => {
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
const payload = await req.json();
|
||||
return res(ctx.status(200), ctx.json(authzMockResponse(payload, [true])));
|
||||
}),
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
TestComponent,
|
||||
'delete',
|
||||
'dashboard:{id}',
|
||||
);
|
||||
|
||||
const mockMatch = {
|
||||
params: { id: '789' },
|
||||
isExact: true,
|
||||
path: '/dashboard/:id',
|
||||
url: '/dashboard/789',
|
||||
};
|
||||
|
||||
const props = {
|
||||
testProp: 'test-value',
|
||||
match: mockMatch,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Test Component: test-value')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,73 @@
|
||||
import { ComponentType, ReactElement, useMemo } from 'react';
|
||||
import { RouteComponentProps } from 'react-router-dom';
|
||||
import {
|
||||
AuthZObject,
|
||||
AuthZRelation,
|
||||
BrandedPermission,
|
||||
} from 'hooks/useAuthZ/types';
|
||||
import { parsePermission } from 'hooks/useAuthZ/utils';
|
||||
|
||||
import ErrorBoundaryFallback from '../../pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
|
||||
import AppLoading from '../AppLoading/AppLoading';
|
||||
import { GuardAuthZ } from '../GuardAuthZ/GuardAuthZ';
|
||||
|
||||
import './createGuardedRoute.styles.scss';
|
||||
|
||||
const onErrorFallback = (): JSX.Element => <ErrorBoundaryFallback />;
|
||||
|
||||
function OnNoPermissionsFallback(response: {
|
||||
requiredPermissionName: BrandedPermission;
|
||||
}): ReactElement {
|
||||
const { relation, object } = parsePermission(response.requiredPermissionName);
|
||||
|
||||
return (
|
||||
<div className="guard-authz-error-no-authz">
|
||||
<div className="guard-authz-error-no-authz-content">
|
||||
<img src="/Icons/no-data.svg" alt="No permission" />
|
||||
<h3>Uh-oh! You don’t have permission to view this page.</h3>
|
||||
<p>
|
||||
You need the following permission to view this page:
|
||||
<br />
|
||||
Relation: <span>{relation}</span>
|
||||
<br />
|
||||
Object: <span>{object}</span>
|
||||
<br />
|
||||
Ask your SigNoz administrator to grant access.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
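// Wraps a route component in GuardAuthZ. `object` may contain {param}
// placeholders (e.g. 'dashboard:{id}') that are resolved from match.params.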
// eslint-disable-next-line @typescript-eslint/ban-types
|
||||
export function createGuardedRoute<P extends object, R extends AuthZRelation>(
|
||||
Component: ComponentType<P>,
|
||||
relation: R,
|
||||
object: AuthZObject<R>,
|
||||
): ComponentType<P & RouteComponentProps<Record<string, string>>> {
|
||||
return function GuardedRouteComponent(
|
||||
props: P & RouteComponentProps<Record<string, string>>,
|
||||
): ReactElement {
|
||||
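// Resolve {param} placeholders from the current route params; a placeholder
// with no matching param is left in place verbatim.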
const resolvedObject = useMemo(() => {
|
||||
const paramPattern = /\{([^}]+)\}/g;
|
||||
return object.replace(paramPattern, (match, paramName) => {
|
||||
const paramValue = props.match?.params?.[paramName];
|
||||
return paramValue !== undefined ? paramValue : match;
|
||||
}) as AuthZObject<R>;
|
||||
}, [props.match?.params]);
|
||||
|
||||
return (
|
||||
<GuardAuthZ
|
||||
relation={relation}
|
||||
object={resolvedObject}
|
||||
fallbackOnLoading={<AppLoading />}
|
||||
fallbackOnError={onErrorFallback}
|
||||
fallbackOnNoPermissions={(response): ReactElement => (
|
||||
<OnNoPermissionsFallback {...response} />
|
||||
)}
|
||||
>
|
||||
<Component {...props} />
|
||||
</GuardAuthZ>
|
||||
);
|
||||
};
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
import { useState } from 'react';
|
||||
import { CloudDownloadOutlined } from '@ant-design/icons';
|
||||
import { Button, Dropdown, MenuProps } from 'antd';
|
||||
import { Excel } from 'antd-table-saveas-excel';
|
||||
import { unparse } from 'papaparse';
|
||||
|
||||
import { DownloadProps } from './Download.types';
|
||||
@@ -8,25 +8,36 @@ import { DownloadProps } from './Download.types';
|
||||
import './Download.styles.scss';
|
||||
|
||||
function Download({ data, isLoading, fileName }: DownloadProps): JSX.Element {
|
||||
const downloadExcelFile = (): void => {
|
||||
const headers = Object.keys(Object.assign({}, ...data)).map((item) => {
|
||||
const updatedTitle = item
|
||||
.split('_')
|
||||
.map((word) => word.charAt(0).toUpperCase() + word.slice(1))
|
||||
.join(' ');
|
||||
return {
|
||||
title: updatedTitle,
|
||||
dataIndex: item,
|
||||
};
|
||||
});
|
||||
const excel = new Excel();
|
||||
excel
|
||||
.addSheet(fileName)
|
||||
.addColumns(headers)
|
||||
.addDataSource(data, {
|
||||
str2Percent: true,
|
||||
})
|
||||
.saveAs(`${fileName}.xlsx`);
|
||||
const [isDownloading, setIsDownloading] = useState(false);
|
||||
|
||||
const downloadExcelFile = async (): Promise<void> => {
|
||||
setIsDownloading(true);
|
||||
|
||||
try {
|
||||
const headers = Object.keys(Object.assign({}, ...data)).map((item) => {
|
||||
const updatedTitle = item
|
||||
.split('_')
|
||||
.map((word) => word.charAt(0).toUpperCase() + word.slice(1))
|
||||
.join(' ');
|
||||
return {
|
||||
title: updatedTitle,
|
||||
dataIndex: item,
|
||||
};
|
||||
});
|
||||
|
||||
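// Load the Excel export library on demand so it stays out of the initial bundle.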
const excelLib = await import('antd-table-saveas-excel');
|
||||
|
||||
const excel = new excelLib.Excel();
|
||||
excel
|
||||
.addSheet(fileName)
|
||||
.addColumns(headers)
|
||||
.addDataSource(data, {
|
||||
str2Percent: true,
|
||||
})
|
||||
.saveAs(`${fileName}.xlsx`);
|
||||
} finally {
|
||||
setIsDownloading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const downloadCsvFile = (): void => {
|
||||
@@ -59,7 +70,7 @@ function Download({ data, isLoading, fileName }: DownloadProps): JSX.Element {
|
||||
<Dropdown menu={menu} trigger={['click']}>
|
||||
<Button
|
||||
className="download-button"
|
||||
loading={isLoading}
|
||||
loading={isLoading || isDownloading}
|
||||
size="small"
|
||||
type="link"
|
||||
>
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { useState } from 'react';
|
||||
import { Button, Popover, Typography } from 'antd';
|
||||
import { Excel } from 'antd-table-saveas-excel';
|
||||
import { FileDigit, FileDown, Sheet } from 'lucide-react';
|
||||
import { unparse } from 'papaparse';
|
||||
|
||||
@@ -8,25 +8,34 @@ import { DownloadProps } from './DownloadV2.types';
|
||||
import './DownloadV2.styles.scss';
|
||||
|
||||
function Download({ data, isLoading, fileName }: DownloadProps): JSX.Element {
|
||||
const downloadExcelFile = (): void => {
|
||||
const headers = Object.keys(Object.assign({}, ...data)).map((item) => {
|
||||
const updatedTitle = item
|
||||
.split('_')
|
||||
.map((word) => word.charAt(0).toUpperCase() + word.slice(1))
|
||||
.join(' ');
|
||||
return {
|
||||
title: updatedTitle,
|
||||
dataIndex: item,
|
||||
};
|
||||
});
|
||||
const excel = new Excel();
|
||||
excel
|
||||
.addSheet(fileName)
|
||||
.addColumns(headers)
|
||||
.addDataSource(data, {
|
||||
str2Percent: true,
|
||||
})
|
||||
.saveAs(`${fileName}.xlsx`);
|
||||
const [isDownloading, setIsDownloading] = useState(false);
|
||||
|
||||
const downloadExcelFile = async (): Promise<void> => {
|
||||
setIsDownloading(true);
|
||||
|
||||
try {
|
||||
const headers = Object.keys(Object.assign({}, ...data)).map((item) => {
|
||||
const updatedTitle = item
|
||||
.split('_')
|
||||
.map((word) => word.charAt(0).toUpperCase() + word.slice(1))
|
||||
.join(' ');
|
||||
return {
|
||||
title: updatedTitle,
|
||||
dataIndex: item,
|
||||
};
|
||||
});
|
||||
const excelLib = await import('antd-table-saveas-excel');
|
||||
const excel = new excelLib.Excel();
|
||||
excel
|
||||
.addSheet(fileName)
|
||||
.addColumns(headers)
|
||||
.addDataSource(data, {
|
||||
str2Percent: true,
|
||||
})
|
||||
.saveAs(`${fileName}.xlsx`);
|
||||
} finally {
|
||||
setIsDownloading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const downloadCsvFile = (): void => {
|
||||
@@ -54,6 +63,7 @@ function Download({ data, isLoading, fileName }: DownloadProps): JSX.Element {
|
||||
type="text"
|
||||
onClick={downloadExcelFile}
|
||||
className="action-btns"
|
||||
loading={isDownloading}
|
||||
>
|
||||
Excel (.xlsx)
|
||||
</Button>
|
||||
|
||||
frontend/src/hooks/useAuthZ/permissions.type.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
// AUTO GENERATED FILE - DO NOT EDIT - GENERATED BY scripts/generate-permissions-type
|
||||
export default {
|
||||
status: 'success',
|
||||
data: {
|
||||
resources: [
|
||||
{
|
||||
name: 'dashboard',
|
||||
type: 'metaresource',
|
||||
},
|
||||
{
|
||||
name: 'dashboards',
|
||||
type: 'metaresources',
|
||||
},
|
||||
],
|
||||
relations: {
|
||||
create: ['metaresources'],
|
||||
delete: ['user', 'role', 'organization', 'metaresource'],
|
||||
list: ['metaresources'],
|
||||
read: ['user', 'role', 'organization', 'metaresource'],
|
||||
update: ['user', 'role', 'organization', 'metaresource'],
|
||||
},
|
||||
},
|
||||
} as const;
|
||||
frontend/src/hooks/useAuthZ/types.ts (new file, 57 lines)
@@ -0,0 +1,57 @@
|
||||
import permissionsType from './permissions.type';
|
||||
import { ObjectSeparator } from './utils';
|
||||
|
||||
type PermissionsData = typeof permissionsType.data;
|
||||
export type Resource = PermissionsData['resources'][number];
|
||||
export type ResourceName = Resource['name'];
|
||||
export type ResourceType = Resource['type'];
|
||||
|
||||
type RelationsByType = PermissionsData['relations'];
|
||||
|
||||
type ResourceTypeMap = {
|
||||
[K in ResourceName]: Extract<Resource, { name: K }>['type'];
|
||||
};
|
||||
|
||||
type RelationName = keyof RelationsByType;
|
||||
|
||||
type ResourcesForRelation<R extends RelationName> = Extract<
|
||||
Resource,
|
||||
{ type: RelationsByType[R][number] }
|
||||
>['name'];
|
||||
|
||||
type IsPluralResource<
|
||||
R extends ResourceName
|
||||
> = ResourceTypeMap[R] extends 'metaresources' ? true : false;
|
||||
|
||||
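// Distributes over the resource-name union: plural ("metaresources") resources
// are referenced by name alone, singular ones as `${name}${ObjectSeparator}${selector}`.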
type ObjectForResource<R extends ResourceName> = R extends infer U
|
||||
? U extends ResourceName
|
||||
? IsPluralResource<U> extends true
|
||||
? U
|
||||
: `${U}${typeof ObjectSeparator}${string}`
|
||||
: never
|
||||
: never;
|
||||
|
||||
type RelationToObject<R extends RelationName> = ObjectForResource<
|
||||
ResourcesForRelation<R>
|
||||
>;
|
||||
|
||||
type AllRelations = RelationName;
|
||||
|
||||
export type AuthZRelation = AllRelations;
|
||||
export type AuthZResource = ResourceName;
|
||||
export type AuthZObject<R extends AuthZRelation> = RelationToObject<R>;
|
||||
|
||||
export type BrandedPermission = string & { __brandedPermission: true };
|
||||
|
||||
export type AuthZCheckResponse = Record<
|
||||
BrandedPermission,
|
||||
{
|
||||
isGranted: boolean;
|
||||
}
|
||||
>;
|
||||
|
||||
export type UseAuthZResult = {
|
||||
isLoading: boolean;
|
||||
error: Error | null;
|
||||
permissions: AuthZCheckResponse | null;
|
||||
};
|
||||
frontend/src/hooks/useAuthZ/useAuthZ.test.tsx (new file, 496 lines)
@@ -0,0 +1,496 @@
|
||||
import { ReactElement } from 'react';
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import {
|
||||
AuthtypesGettableTransactionDTO,
|
||||
AuthtypesTransactionDTO,
|
||||
} from 'api/generated/services/sigNoz.schemas';
|
||||
import { ENVIRONMENT } from 'constants/env';
|
||||
import { server } from 'mocks-server/server';
|
||||
import { rest } from 'msw';
|
||||
import { AllTheProviders } from 'tests/test-utils';
|
||||
|
||||
import { BrandedPermission } from './types';
|
||||
import { useAuthZ } from './useAuthZ';
|
||||
import { buildPermission } from './utils';
|
||||
|
||||
const BASE_URL = ENVIRONMENT.baseURL || '';
|
||||
const AUTHZ_CHECK_URL = `${BASE_URL}/api/v1/authz/check`;
|
||||
|
||||
function authzMockResponse(
|
||||
payload: AuthtypesTransactionDTO[],
|
||||
authorizedByIndex: boolean[],
|
||||
): { data: AuthtypesGettableTransactionDTO[]; status: string } {
|
||||
return {
|
||||
data: payload.map((txn, i) => ({
|
||||
relation: txn.relation,
|
||||
object: txn.object,
|
||||
authorized: authorizedByIndex[i] ?? false,
|
||||
})),
|
||||
status: 'success',
|
||||
};
|
||||
}
|
||||
|
||||
const wrapper = ({ children }: { children: ReactElement }): ReactElement => (
|
||||
<AllTheProviders>{children}</AllTheProviders>
|
||||
);
|
||||
|
||||
describe('useAuthZ', () => {
|
||||
it('should fetch and return permissions successfully', async () => {
|
||||
const permission1 = buildPermission('read', 'dashboard:*');
|
||||
const permission2 = buildPermission('update', 'dashboard:123');
|
||||
|
||||
const expectedResponse = {
|
||||
[permission1]: {
|
||||
isGranted: true,
|
||||
},
|
||||
[permission2]: {
|
||||
isGranted: false,
|
||||
},
|
||||
};
|
||||
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
const payload = await req.json();
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json(authzMockResponse(payload, [true, false])),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const { result } = renderHook(() => useAuthZ([permission1, permission2]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
expect(result.current.isLoading).toBe(true);
|
||||
expect(result.current.permissions).toBeNull();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(result.current.error).toBeNull();
|
||||
expect(result.current.permissions).toEqual(expectedResponse);
|
||||
});
|
||||
|
||||
it('should handle API errors', async () => {
|
||||
const permission = buildPermission('read', 'dashboard:*');
|
||||
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, (_req, res, ctx) => {
|
||||
return res(ctx.status(500), ctx.json({ error: 'Internal Server Error' }));
|
||||
}),
|
||||
);
|
||||
|
||||
const { result } = renderHook(() => useAuthZ([permission]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(result.current.error).not.toBeNull();
|
||||
expect(result.current.permissions).toBeNull();
|
||||
});
|
||||
|
||||
it('should refetch when permissions array changes', async () => {
|
||||
const permission1 = buildPermission('read', 'dashboard:*');
|
||||
const permission2 = buildPermission('update', 'dashboard:123');
|
||||
const permission3 = buildPermission('delete', 'dashboard:456');
|
||||
|
||||
let requestCount = 0;
|
||||
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
requestCount++;
|
||||
const payload = await req.json();
|
||||
|
||||
if (payload.length === 1) {
|
||||
return res(ctx.status(200), ctx.json(authzMockResponse(payload, [true])));
|
||||
}
|
||||
|
||||
const authorized = payload.map(
|
||||
(txn: { relation: string }) =>
|
||||
txn.relation === 'read' || txn.relation === 'delete',
|
||||
);
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json(authzMockResponse(payload, authorized)),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const { result, rerender } = renderHook<
|
||||
ReturnType<typeof useAuthZ>,
|
||||
BrandedPermission[]
|
||||
>((permissions) => useAuthZ(permissions), {
|
||||
wrapper,
|
||||
initialProps: [permission1],
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(requestCount).toBe(1);
|
||||
expect(result.current.permissions).toEqual({
|
||||
[permission1]: {
|
||||
isGranted: true,
|
||||
},
|
||||
});
|
||||
|
||||
rerender([permission1, permission2, permission3]);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(requestCount).toBe(2);
|
||||
expect(result.current.permissions).toEqual({
|
||||
[permission1]: {
|
||||
isGranted: true,
|
||||
},
|
||||
[permission2]: {
|
||||
isGranted: false,
|
||||
},
|
||||
[permission3]: {
|
||||
isGranted: true,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should not refetch when permissions array order changes but content is the same', async () => {
|
||||
const permission1 = buildPermission('read', 'dashboard:*');
|
||||
const permission2 = buildPermission('update', 'dashboard:123');
|
||||
|
||||
let requestCount = 0;
|
||||
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
requestCount++;
|
||||
const payload = await req.json();
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json(authzMockResponse(payload, [true, false])),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const { result, rerender } = renderHook<
|
||||
ReturnType<typeof useAuthZ>,
|
||||
BrandedPermission[]
|
||||
>((permissions) => useAuthZ(permissions), {
|
||||
wrapper,
|
||||
initialProps: [permission1, permission2],
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(requestCount).toBe(1);
|
||||
|
||||
rerender([permission2, permission1]);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(requestCount).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle empty permissions array', async () => {
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, (_req, res, ctx) => {
|
||||
return res(ctx.status(200), ctx.json({ data: [], status: 'success' }));
|
||||
}),
|
||||
);
|
||||
|
||||
const { result } = renderHook(() => useAuthZ([]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
expect(result.current.error).toBeNull();
|
||||
expect(result.current.permissions).toEqual({});
|
||||
});
|
||||
|
||||
it('should send correct payload format to API', async () => {
|
||||
const permission1 = buildPermission('read', 'dashboard:*');
|
||||
const permission2 = buildPermission('update', 'dashboard:123');
|
||||
|
||||
let receivedPayload: any = null;
|
||||
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
receivedPayload = await req.json();
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json(authzMockResponse(receivedPayload, [true, false])),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const { result } = renderHook(() => useAuthZ([permission1, permission2]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(receivedPayload).toHaveLength(2);
|
||||
expect(receivedPayload[0]).toMatchObject({
|
||||
relation: 'read',
|
||||
object: {
|
||||
resource: { name: 'dashboard', type: 'metaresource' },
|
||||
selector: '*',
|
||||
},
|
||||
});
|
||||
expect(receivedPayload[1]).toMatchObject({
|
||||
relation: 'update',
|
||||
object: {
|
||||
resource: { name: 'dashboard', type: 'metaresource' },
|
||||
selector: '123',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should batch multiple hooks into single flight request', async () => {
|
||||
const permission1 = buildPermission('read', 'dashboard:*');
|
||||
const permission2 = buildPermission('update', 'dashboard:123');
|
||||
const permission3 = buildPermission('delete', 'dashboard:456');
|
||||
|
||||
let requestCount = 0;
|
||||
const receivedPayloads: any[] = [];
|
||||
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
requestCount++;
|
||||
const payload = await req.json();
|
||||
receivedPayloads.push(payload);
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json(authzMockResponse(payload, [true, false, true])),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const { result: result1 } = renderHook(() => useAuthZ([permission1]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
const { result: result2 } = renderHook(() => useAuthZ([permission2]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
const { result: result3 } = renderHook(() => useAuthZ([permission3]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(result1.current.isLoading).toBe(false);
|
||||
expect(result2.current.isLoading).toBe(false);
|
||||
expect(result3.current.isLoading).toBe(false);
|
||||
},
|
||||
{ timeout: 200 },
|
||||
);
|
||||
|
||||
expect(requestCount).toBe(1);
|
||||
expect(receivedPayloads).toHaveLength(1);
|
||||
expect(receivedPayloads[0]).toHaveLength(3);
|
||||
expect(receivedPayloads[0][0]).toMatchObject({
|
||||
relation: 'read',
|
||||
object: {
|
||||
resource: { name: 'dashboard', type: 'metaresource' },
|
||||
selector: '*',
|
||||
},
|
||||
});
|
||||
expect(receivedPayloads[0][1]).toMatchObject({
|
||||
relation: 'update',
|
||||
object: { resource: { name: 'dashboard' }, selector: '123' },
|
||||
});
|
||||
expect(receivedPayloads[0][2]).toMatchObject({
|
||||
relation: 'delete',
|
||||
object: { resource: { name: 'dashboard' }, selector: '456' },
|
||||
});
|
||||
|
||||
expect(result1.current.permissions).toEqual({
|
||||
[permission1]: { isGranted: true },
|
||||
});
|
||||
expect(result2.current.permissions).toEqual({
|
||||
[permission2]: { isGranted: false },
|
||||
});
|
||||
expect(result3.current.permissions).toEqual({
|
||||
[permission3]: { isGranted: true },
|
||||
});
|
||||
});
|
||||
|
||||
it('should create separate batches for calls after single flight window', async () => {
|
||||
const permission1 = buildPermission('read', 'dashboard:*');
|
||||
const permission2 = buildPermission('update', 'dashboard:123');
|
||||
const permission3 = buildPermission('delete', 'dashboard:456');
|
||||
|
||||
let requestCount = 0;
|
||||
const receivedPayloads: any[] = [];
|
||||
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
requestCount++;
|
||||
const payload = await req.json();
|
||||
receivedPayloads.push(payload);
|
||||
const authorized = payload.length === 1 ? [true] : [false, true];
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json(authzMockResponse(payload, authorized)),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const { result: result1 } = renderHook(() => useAuthZ([permission1]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(result1.current.isLoading).toBe(false);
|
||||
},
|
||||
{ timeout: 200 },
|
||||
);
|
||||
|
||||
expect(requestCount).toBe(1);
|
||||
expect(receivedPayloads[0]).toHaveLength(1);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
const { result: result2 } = renderHook(() => useAuthZ([permission2]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
const { result: result3 } = renderHook(() => useAuthZ([permission3]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(result2.current.isLoading).toBe(false);
|
||||
expect(result3.current.isLoading).toBe(false);
|
||||
},
|
||||
{ timeout: 200 },
|
||||
);
|
||||
|
||||
expect(requestCount).toBe(2);
|
||||
expect(receivedPayloads).toHaveLength(2);
|
||||
expect(receivedPayloads[1]).toHaveLength(2);
|
||||
expect(receivedPayloads[1][0]).toMatchObject({
|
||||
relation: 'update',
|
||||
object: { resource: { name: 'dashboard' }, selector: '123' },
|
||||
});
|
||||
expect(receivedPayloads[1][1]).toMatchObject({
|
||||
relation: 'delete',
|
||||
object: { resource: { name: 'dashboard' }, selector: '456' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should map permissions correctly when API returns response out of order', async () => {
|
||||
const permission1 = buildPermission('read', 'dashboard:*');
|
||||
const permission2 = buildPermission('update', 'dashboard:123');
|
||||
const permission3 = buildPermission('delete', 'dashboard:456');
|
||||
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
const payload = await req.json();
|
||||
const reversed = [...payload].reverse();
|
||||
const authorizedByReversed = [true, false, true];
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
data: reversed.map((txn: any, i: number) => ({
|
||||
relation: txn.relation,
|
||||
object: txn.object,
|
||||
authorized: authorizedByReversed[i],
|
||||
})),
|
||||
status: 'success',
|
||||
}),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const { result } = renderHook(
|
||||
() => useAuthZ([permission1, permission2, permission3]),
|
||||
{ wrapper },
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(result.current.permissions).toEqual({
|
||||
[permission1]: { isGranted: true },
|
||||
[permission2]: { isGranted: false },
|
||||
[permission3]: { isGranted: true },
|
||||
});
|
||||
});
|
||||
|
||||
it('should not leak state between separate batches', async () => {
|
||||
const permission1 = buildPermission('read', 'dashboard:*');
|
||||
const permission2 = buildPermission('update', 'dashboard:123');
|
||||
|
||||
let requestCount = 0;
|
||||
|
||||
server.use(
|
||||
rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
|
||||
requestCount++;
|
||||
const payload = await req.json();
|
||||
const authorized = payload.map(
|
||||
(txn: { relation: string }) => txn.relation === 'read',
|
||||
);
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json(authzMockResponse(payload, authorized)),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const { result: result1 } = renderHook(() => useAuthZ([permission1]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(result1.current.isLoading).toBe(false);
|
||||
},
|
||||
{ timeout: 200 },
|
||||
);
|
||||
|
||||
expect(requestCount).toBe(1);
|
||||
expect(result1.current.permissions).toEqual({
|
||||
[permission1]: { isGranted: true },
|
||||
});
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
const { result: result2 } = renderHook(() => useAuthZ([permission2]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(result2.current.isLoading).toBe(false);
|
||||
},
|
||||
{ timeout: 200 },
|
||||
);
|
||||
|
||||
expect(requestCount).toBe(2);
|
||||
expect(result1.current.permissions).toEqual({
|
||||
[permission1]: { isGranted: true },
|
||||
});
|
||||
expect(result2.current.permissions).toEqual({
|
||||
[permission2]: { isGranted: false },
|
||||
});
|
||||
expect(result1.current.permissions).not.toHaveProperty(permission2);
|
||||
expect(result2.current.permissions).not.toHaveProperty(permission1);
|
||||
});
|
||||
});
|
||||
frontend/src/hooks/useAuthZ/useAuthZ.tsx (new file, 129 lines)
@@ -0,0 +1,129 @@
import { useMemo } from 'react';
import { useQueries } from 'react-query';
import { authzCheck } from 'api/generated/services/authz';
import type {
	AuthtypesObjectDTO,
	AuthtypesTransactionDTO,
} from 'api/generated/services/sigNoz.schemas';

import { AuthZCheckResponse, BrandedPermission, UseAuthZResult } from './types';
import {
	gettableTransactionToPermission,
	permissionToTransactionDto,
} from './utils';

let ctx: Promise<AuthZCheckResponse> | null;
let pendingPermissions: BrandedPermission[] = [];
const SINGLE_FLIGHT_WAIT_TIME_MS = 50;
const AUTHZ_CACHE_TIME = 20_000;

function dispatchPermission(
	permission: BrandedPermission,
): Promise<AuthZCheckResponse> {
	pendingPermissions.push(permission);

	if (!ctx) {
		let resolve: (v: AuthZCheckResponse) => void, reject: (reason?: any) => void;
		ctx = new Promise<AuthZCheckResponse>((r, re) => {
			resolve = r;
			reject = re;
		});

		setTimeout(() => {
			const copiedPermissions = pendingPermissions.slice();
			pendingPermissions = [];
			ctx = null;

			fetchManyPermissions(copiedPermissions).then(resolve).catch(reject);
		}, SINGLE_FLIGHT_WAIT_TIME_MS);
	}

	return ctx;
}

async function fetchManyPermissions(
	permissions: BrandedPermission[],
): Promise<AuthZCheckResponse> {
	const payload: AuthtypesTransactionDTO[] = permissions.map((permission) => {
		const dto = permissionToTransactionDto(permission);
		const object: AuthtypesObjectDTO = {
			resource: {
				name: dto.object.resource.name,
				type: dto.object.resource.type,
			},
			selector: dto.object.selector,
		};
		return { relation: dto.relation, object };
	});

	const { data } = await authzCheck(payload);

	const fromApi = (data ?? []).reduce<AuthZCheckResponse>((acc, item) => {
		const permission = gettableTransactionToPermission(item);
		acc[permission] = { isGranted: !!item.authorized };
		return acc;
	}, {} as AuthZCheckResponse);

	return permissions.reduce<AuthZCheckResponse>((acc, permission) => {
		acc[permission] = fromApi[permission] ?? { isGranted: false };
		return acc;
	}, {} as AuthZCheckResponse);
}

export function useAuthZ(permissions: BrandedPermission[]): UseAuthZResult {
	const queryResults = useQueries(
		permissions.map((permission) => {
			return {
				queryKey: ['authz', permission],
				cacheTime: AUTHZ_CACHE_TIME,
				refetchOnMount: false,
				refetchIntervalInBackground: false,
				refetchOnWindowFocus: false,
				refetchOnReconnect: true,
				queryFn: async (): Promise<AuthZCheckResponse> => {
					const response = await dispatchPermission(permission);

					return {
						[permission]: {
							isGranted: response[permission].isGranted,
						},
					};
				},
			};
		}),
	);

	const isLoading = useMemo(() => queryResults.some((q) => q.isLoading), [
		queryResults,
	]);
	const error = useMemo(
		() =>
			!isLoading
				? (queryResults.find((q) => !!q.error)?.error as Error) || null
				: null,
		[isLoading, queryResults],
	);
	const data = useMemo(() => {
		if (isLoading || error) {
			return null;
		}

		return queryResults.reduce((acc, q) => {
			if (!q.data) {
				return acc;
			}

			for (const [key, value] of Object.entries(q.data)) {
				acc[key as BrandedPermission] = value;
			}

			return acc;
		}, {} as AuthZCheckResponse);
	}, [isLoading, error, queryResults]);

	return {
		isLoading,
		error,
		permissions: data ?? null,
	};
}
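For reference, a minimal usage sketch of the new useAuthZ hook from the file above. The component, the 'delete'/'dashboard' relation and resource names, and the import aliases are assumptions for illustration; only buildPermission and useAuthZ come from this diff.

import { useAuthZ } from 'hooks/useAuthZ/useAuthZ';
import { buildPermission } from 'hooks/useAuthZ/utils';

// Hypothetical component: render a delete button only when the
// 'delete' relation is granted on a specific dashboard.
function DeleteDashboardButton({
	dashboardId,
}: {
	dashboardId: string;
}): JSX.Element | null {
	const permission = buildPermission('delete', `dashboard:${dashboardId}`);
	const { isLoading, error, permissions } = useAuthZ([permission]);

	if (isLoading || error || !permissions?.[permission]?.isGranted) {
		return null;
	}

	return <button type="button">Delete dashboard</button>;
}

Because queryFn goes through dispatchPermission, components mounting within the 50 ms single-flight window share a single POST to the authz check endpoint, which is what the batching tests above exercise.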
frontend/src/hooks/useAuthZ/utils.ts (new file, 85 lines)
@@ -0,0 +1,85 @@
import { AuthtypesTransactionDTO } from '../../api/generated/services/sigNoz.schemas';
import permissionsType from './permissions.type';
import {
	AuthZObject,
	AuthZRelation,
	AuthZResource,
	BrandedPermission,
	ResourceName,
	ResourceType,
} from './types';

export const PermissionSeparator = '||__||';
export const ObjectSeparator = ':';

export function buildPermission<R extends AuthZRelation>(
	relation: R,
	object: AuthZObject<R>,
): BrandedPermission {
	return `${relation}${PermissionSeparator}${object}` as BrandedPermission;
}

export function buildObjectString(
	resource: AuthZResource,
	objectId: string,
): `${AuthZResource}${typeof ObjectSeparator}${string}` {
	return `${resource}${ObjectSeparator}${objectId}` as const;
}

export function parsePermission(
	permission: BrandedPermission,
): {
	relation: AuthZRelation;
	object: string;
} {
	const [relation, object] = permission.split(PermissionSeparator);
	return { relation: relation as AuthZRelation, object };
}

const resourceNameToType = permissionsType.data.resources.reduce((acc, r) => {
	acc[r.name] = r.type;
	return acc;
}, {} as Record<ResourceName, ResourceType>);

export function permissionToTransactionDto(
	permission: BrandedPermission,
): AuthtypesTransactionDTO {
	const { relation, object: objectStr } = parsePermission(permission);
	const directType = resourceNameToType[objectStr as ResourceName];
	if (directType === 'metaresources') {
		return {
			relation,
			object: {
				resource: { name: objectStr, type: directType },
				selector: '*',
			},
		};
	}
	const [resourceName, selector] = objectStr.split(ObjectSeparator);
	const type =
		resourceNameToType[resourceName as ResourceName] ?? 'metaresource';

	return {
		relation,
		object: {
			resource: { name: resourceName, type },
			selector: selector || '*',
		},
	};
}

export function gettableTransactionToPermission(
	item: AuthtypesTransactionDTO,
): BrandedPermission {
	const {
		relation,
		object: { resource, selector },
	} = item;
	const resourceName = String(resource.name);
	const selectorStr = typeof selector === 'string' ? selector : '*';
	const objectStr =
		resource.type === 'metaresources'
			? resourceName
			: `${resourceName}${ObjectSeparator}${selectorStr}`;
	return `${relation}${PermissionSeparator}${objectStr}` as BrandedPermission;
}
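A small sketch of how the permission string encoding above round-trips; the 'read' relation and the 'dashboard' resource name are illustrative and assume 'dashboard' is a resource registered in permissions.type.

import {
	buildPermission,
	parsePermission,
	PermissionSeparator,
} from 'hooks/useAuthZ/utils';

// Encoded form is "<relation>||__||<resource>:<selector>".
const permission = buildPermission('read', 'dashboard:123');

const { relation, object } = parsePermission(permission);
// relation === 'read', object === 'dashboard:123'
console.log(permission.split(PermissionSeparator)); // ['read', 'dashboard:123']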
@@ -8,6 +8,7 @@ import {
|
||||
createSetCursorHandler,
|
||||
createSetLegendHandler,
|
||||
createSetSeriesHandler,
|
||||
getPlot,
|
||||
isScrollEventInPlot,
|
||||
updatePlotVisibility,
|
||||
updateWindowSize,
|
||||
@@ -53,7 +54,7 @@ export default function TooltipPlugin({
|
||||
const [viewState, setState] = useState<TooltipViewState>(
|
||||
createInitialViewState,
|
||||
);
|
||||
const { plot, isHovering, isPinned, contents, style } = viewState;
|
||||
const { hasPlot, isHovering, isPinned, contents, style } = viewState;
|
||||
|
||||
/**
|
||||
* Merge a partial view update into the current React state.
|
||||
@@ -72,12 +73,25 @@ export default function TooltipPlugin({
|
||||
layoutRef.current?.observer.disconnect();
|
||||
layoutRef.current = createLayoutObserver(layoutRef);
|
||||
|
||||
/**
|
||||
* Plot lifecycle and GC: viewState uses hasPlot (boolean), not the plot
|
||||
* reference; clearPlotReferences runs in cleanup so
|
||||
* detached canvases can be garbage collected.
|
||||
*/
|
||||
// Controller holds the mutable interaction state for this tooltip
|
||||
// instance. It is intentionally *not* React state so uPlot hooks
|
||||
// and DOM listeners can update it freely without triggering a
|
||||
// render on every mouse move.
|
||||
const controller: TooltipControllerState = createInitialControllerState();
|
||||
|
||||
/**
|
||||
* Clear plot references so detached canvases can be garbage collected.
|
||||
*/
|
||||
const clearPlotReferences = (): void => {
|
||||
controller.plot = null;
|
||||
updateState({ hasPlot: false });
|
||||
};
|
||||
|
||||
const syncTooltipWithDashboard = syncMode === DashboardCursorSync.Tooltip;
|
||||
|
||||
// Enable uPlot's built-in cursor sync when requested so that
|
||||
@@ -110,9 +124,10 @@ export default function TooltipPlugin({
|
||||
// Lock uPlot's internal cursor when the tooltip is pinned so
|
||||
// subsequent mouse moves do not move the crosshair.
|
||||
function updateCursorLock(): void {
|
||||
if (controller.plot) {
|
||||
const plot = getPlot(controller);
|
||||
if (plot) {
|
||||
// @ts-ignore uPlot cursor lock is not working as expected
|
||||
controller.plot.cursor._lock = controller.pinned;
|
||||
plot.cursor._lock = controller.pinned;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -142,8 +157,9 @@ export default function TooltipPlugin({
|
||||
const isPinnedBeforeDismiss = controller.pinned;
|
||||
controller.pinned = false;
|
||||
controller.hoverActive = false;
|
||||
if (controller.plot) {
|
||||
controller.plot.setCursor({ left: -10, top: -10 });
|
||||
const plot = getPlot(controller);
|
||||
if (plot) {
|
||||
plot.setCursor({ left: -10, top: -10 });
|
||||
}
|
||||
scheduleRender(isPinnedBeforeDismiss);
|
||||
}
|
||||
@@ -151,11 +167,12 @@ export default function TooltipPlugin({
|
||||
// Build the React node to be rendered inside the tooltip by
|
||||
// delegating to the caller-provided `render` function.
|
||||
function createTooltipContents(): React.ReactNode {
|
||||
if (!controller.hoverActive || !controller.plot) {
|
||||
const plot = getPlot(controller);
|
||||
if (!controller.hoverActive || !plot) {
|
||||
return null;
|
||||
}
|
||||
return renderRef.current({
|
||||
uPlotInstance: controller.plot,
|
||||
uPlotInstance: plot,
|
||||
dataIndexes: controller.seriesIndexes,
|
||||
seriesIndex: controller.focusedSeriesIndex,
|
||||
isPinned: controller.pinned,
|
||||
@@ -240,9 +257,13 @@ export default function TooltipPlugin({
|
||||
|
||||
// When pinning is enabled, a click on the plot overlay while
|
||||
// hovering converts the transient tooltip into a pinned one.
|
||||
const handleUPlotOverClick = (u: uPlot, event: MouseEvent): void => {
|
||||
// Uses getPlot(controller) to avoid closing over u (plot), which
|
||||
// would retain the plot and detached canvases across unmounts.
|
||||
const handleUPlotOverClick = (event: MouseEvent): void => {
|
||||
const plot = getPlot(controller);
|
||||
if (
|
||||
event.target === u.over &&
|
||||
plot &&
|
||||
event.target === plot.over &&
|
||||
controller.hoverActive &&
|
||||
!controller.pinned &&
|
||||
controller.focusedSeriesIndex != null
|
||||
@@ -260,10 +281,9 @@ export default function TooltipPlugin({
|
||||
// on the controller and optionally attach the pinning handler.
|
||||
const handleInit = (u: uPlot): void => {
|
||||
controller.plot = u;
|
||||
updateState({ plot: u });
|
||||
updateState({ hasPlot: true });
|
||||
if (canPinTooltip) {
|
||||
overClickHandler = (event: MouseEvent): void =>
|
||||
handleUPlotOverClick(u, event);
|
||||
overClickHandler = handleUPlotOverClick;
|
||||
u.over.addEventListener('click', overClickHandler);
|
||||
}
|
||||
};
|
||||
@@ -299,7 +319,6 @@ export default function TooltipPlugin({
|
||||
const handleSetCursor = createSetCursorHandler(ctx);
|
||||
|
||||
handleWindowResize();
|
||||
|
||||
const removeReadyHook = config.addHook('ready', (): void =>
|
||||
updatePlotVisibility(controller),
|
||||
);
|
||||
@@ -325,16 +344,20 @@ export default function TooltipPlugin({
|
||||
removeSetSeriesHook();
|
||||
removeSetLegendHook();
|
||||
removeSetCursorHook();
|
||||
if (controller.plot && overClickHandler) {
|
||||
controller.plot.over.removeEventListener('click', overClickHandler);
|
||||
if (overClickHandler) {
|
||||
const plot = getPlot(controller);
|
||||
if (plot) {
|
||||
plot.over.removeEventListener('click', overClickHandler);
|
||||
}
|
||||
overClickHandler = null;
|
||||
}
|
||||
clearPlotReferences();
|
||||
};
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [config]);
|
||||
|
||||
useLayoutEffect((): void => {
|
||||
if (!plot || !layoutRef.current) {
|
||||
if (!hasPlot || !layoutRef.current) {
|
||||
return;
|
||||
}
|
||||
const layout = layoutRef.current;
|
||||
@@ -349,9 +372,9 @@ export default function TooltipPlugin({
|
||||
layout.width = 0;
|
||||
layout.height = 0;
|
||||
}
|
||||
}, [isHovering, plot]);
|
||||
}, [isHovering, hasPlot]);
|
||||
|
||||
if (!plot) {
|
||||
if (!hasPlot) {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
@@ -10,6 +10,11 @@ import {
|
||||
|
||||
const WINDOW_OFFSET = 16;
|
||||
|
||||
/** Get the plot instance from the controller; returns null if never set or cleared. */
|
||||
export function getPlot(controller: TooltipControllerState): uPlot | null {
|
||||
return controller.plot ?? null;
|
||||
}
|
||||
|
||||
export function createInitialControllerState(): TooltipControllerState {
|
||||
return {
|
||||
plot: null,
|
||||
@@ -46,12 +51,13 @@ export function updateWindowSize(controller: TooltipControllerState): void {
|
||||
* This is used to decide if a synced tooltip should be shown at all.
|
||||
*/
|
||||
export function updatePlotVisibility(controller: TooltipControllerState): void {
|
||||
if (!controller.plot) {
|
||||
const plot = getPlot(controller);
|
||||
if (!plot) {
|
||||
controller.plotWithinViewport = false;
|
||||
return;
|
||||
}
|
||||
controller.plotWithinViewport = isPlotInViewport(
|
||||
controller.plot.rect,
|
||||
plot.rect,
|
||||
controller.windowWidth,
|
||||
controller.windowHeight,
|
||||
);
|
||||
@@ -66,10 +72,11 @@ export function isScrollEventInPlot(
|
||||
event: Event,
|
||||
controller: TooltipControllerState,
|
||||
): boolean {
|
||||
const plot = getPlot(controller);
|
||||
return (
|
||||
event.target instanceof Node &&
|
||||
controller.plot !== null &&
|
||||
event.target.contains(controller.plot.root)
|
||||
plot !== null &&
|
||||
event.target.contains(plot.root)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -165,11 +172,12 @@ export function createSetLegendHandler(
|
||||
): (u: uPlot) => void {
|
||||
return (u: uPlot): void => {
|
||||
const { controller } = ctx;
|
||||
if (!controller.plot?.cursor?.idxs) {
|
||||
const plot = getPlot(controller);
|
||||
if (!plot?.cursor?.idxs) {
|
||||
return;
|
||||
}
|
||||
|
||||
const newSeriesIndexes = controller.plot.cursor.idxs.slice();
|
||||
const newSeriesIndexes = plot.cursor.idxs.slice();
|
||||
const isAnySeriesActive = newSeriesIndexes.some((v, i) => i > 0 && v != null);
|
||||
|
||||
const previousCursorDrivenBySync = controller.cursorDrivenBySync;
|
||||
|
||||
@@ -18,7 +18,8 @@ export enum DashboardCursorSync {
|
||||
}
|
||||
|
||||
export interface TooltipViewState {
|
||||
plot?: uPlot | null;
|
||||
/** Whether a plot instance exists; plot reference is in controller, not state. */
|
||||
hasPlot?: boolean;
|
||||
style: Partial<CSSProperties>;
|
||||
isHovering: boolean;
|
||||
isPinned: boolean;
|
||||
|
||||
@@ -123,7 +123,7 @@ export function createInitialViewState(): TooltipViewState {
|
||||
isHovering: false,
|
||||
isPinned: false,
|
||||
contents: null,
|
||||
plot: null,
|
||||
hasPlot: false,
|
||||
dismiss: (): void => {},
|
||||
};
|
||||
}
|
||||
|
||||
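The tooltip changes above follow one pattern: the mutable controller keeps the uPlot instance, React state only tracks a hasPlot boolean, and clearing the reference on cleanup lets detached canvases be garbage collected. A simplified, hypothetical sketch of that split (not the plugin's real types):

import { useRef, useState } from 'react';
import type uPlot from 'uplot';

interface ControllerSketch {
	// Mutable plot reference; updated by uPlot hooks without re-rendering.
	plot: uPlot | null;
}

function usePlotPresence(): {
	controller: ControllerSketch;
	hasPlot: boolean;
	attach: (u: uPlot) => void;
	detach: () => void;
} {
	// useRef keeps one controller object alive across renders.
	const controllerRef = useRef<ControllerSketch>({ plot: null });
	const [hasPlot, setHasPlot] = useState(false);

	const attach = (u: uPlot): void => {
		controllerRef.current.plot = u;
		setHasPlot(true);
	};

	const detach = (): void => {
		// Dropping the reference lets the detached canvas be garbage collected.
		controllerRef.current.plot = null;
		setHasPlot(false);
	};

	return { controller: controllerRef.current, hasPlot, attach, detach };
}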
@@ -6205,10 +6205,10 @@
|
||||
"@types/history" "^4.7.11"
|
||||
"@types/react" "*"
|
||||
|
||||
"@types/react-syntax-highlighter@15.5.7":
|
||||
version "15.5.7"
|
||||
resolved "https://registry.yarnpkg.com/@types/react-syntax-highlighter/-/react-syntax-highlighter-15.5.7.tgz#bd29020ccb118543d88779848f99059b64b02d0f"
|
||||
integrity sha512-bo5fEO5toQeyCp0zVHBeggclqf5SQ/Z5blfFmjwO5dkMVGPgmiwZsJh9nu/Bo5L7IHTuGWrja6LxJVE2uB5ZrQ==
|
||||
"@types/react-syntax-highlighter@15.5.13":
|
||||
version "15.5.13"
|
||||
resolved "https://registry.yarnpkg.com/@types/react-syntax-highlighter/-/react-syntax-highlighter-15.5.13.tgz#c5baf62a3219b3bf28d39cfea55d0a49a263d1f2"
|
||||
integrity sha512-uLGJ87j6Sz8UaBAooU0T6lWJ0dBmjZgN1PZTrj05TNql2/XpC6+4HhMT5syIdFUUt+FASfCeLLv4kBygNU+8qA==
|
||||
dependencies:
|
||||
"@types/react" "*"
|
||||
|
||||
|
||||
@@ -18,7 +18,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/preference"
|
||||
"github.com/SigNoz/signoz/pkg/modules/promote"
|
||||
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
|
||||
"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
|
||||
"github.com/SigNoz/signoz/pkg/modules/session"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
@@ -47,9 +47,9 @@ type provider struct {
|
||||
gatewayHandler gateway.Handler
|
||||
fieldsHandler fields.Handler
|
||||
authzHandler authz.Handler
|
||||
rawDataExportHandler rawdataexport.Handler
|
||||
zeusHandler zeus.Handler
|
||||
querierHandler querier.Handler
|
||||
serviceAccountHandler serviceaccount.Handler
|
||||
}
|
||||
|
||||
func NewFactory(
|
||||
@@ -69,9 +69,9 @@ func NewFactory(
|
||||
gatewayHandler gateway.Handler,
|
||||
fieldsHandler fields.Handler,
|
||||
authzHandler authz.Handler,
|
||||
rawDataExportHandler rawdataexport.Handler,
|
||||
zeusHandler zeus.Handler,
|
||||
querierHandler querier.Handler,
|
||||
serviceAccountHandler serviceaccount.Handler,
|
||||
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
|
||||
return newProvider(
|
||||
@@ -94,9 +94,9 @@ func NewFactory(
|
||||
gatewayHandler,
|
||||
fieldsHandler,
|
||||
authzHandler,
|
||||
rawDataExportHandler,
|
||||
zeusHandler,
|
||||
querierHandler,
|
||||
serviceAccountHandler,
|
||||
)
|
||||
})
|
||||
}
|
||||
@@ -121,9 +121,9 @@ func newProvider(
|
||||
gatewayHandler gateway.Handler,
|
||||
fieldsHandler fields.Handler,
|
||||
authzHandler authz.Handler,
|
||||
rawDataExportHandler rawdataexport.Handler,
|
||||
zeusHandler zeus.Handler,
|
||||
querierHandler querier.Handler,
|
||||
serviceAccountHandler serviceaccount.Handler,
|
||||
) (apiserver.APIServer, error) {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
|
||||
router := mux.NewRouter().UseEncodedPath()
|
||||
@@ -146,9 +146,9 @@ func newProvider(
|
||||
gatewayHandler: gatewayHandler,
|
||||
fieldsHandler: fieldsHandler,
|
||||
authzHandler: authzHandler,
|
||||
rawDataExportHandler: rawDataExportHandler,
|
||||
zeusHandler: zeusHandler,
|
||||
querierHandler: querierHandler,
|
||||
serviceAccountHandler: serviceAccountHandler,
|
||||
}
|
||||
|
||||
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
|
||||
@@ -221,10 +221,6 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := provider.addRawDataExportRoutes(router); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := provider.addZeusRoutes(router); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -233,6 +229,10 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := provider.addServiceAccountRoutes(router); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
@@ -1,47 +0,0 @@
|
||||
package signozapiserver
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/exporttypes"
|
||||
v5 "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
func (provider *provider) addRawDataExportRoutes(router *mux.Router) error {
|
||||
if err := router.Handle("/api/v1/export_raw_data", handler.New(provider.authZ.ViewAccess(provider.rawDataExportHandler.ExportRawData), handler.OpenAPIDef{
|
||||
ID: "HandleExportRawDataGET",
|
||||
Tags: []string{"logs", "traces"},
|
||||
Summary: "Export raw data",
|
||||
Description: "This endpoints allows simple query exporting raw data for traces and logs",
|
||||
RequestQuery: new(exporttypes.ExportRawDataQueryParams),
|
||||
RequestContentType: "",
|
||||
Response: nil,
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest},
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
|
||||
})).Methods(http.MethodGet).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := router.Handle("/api/v1/export_raw_data", handler.New(provider.authZ.ViewAccess(provider.rawDataExportHandler.ExportRawData), handler.OpenAPIDef{
|
||||
ID: "HandleExportRawDataPOST",
|
||||
Tags: []string{"logs", "traces"},
|
||||
Summary: "Export raw data",
|
||||
Description: "This endpoints allows complex query exporting raw data for traces and logs",
|
||||
Request: new(v5.QueryRangeRequest),
|
||||
RequestQuery: new(exporttypes.ExportRawDataFormatQueryParam),
|
||||
RequestContentType: "application/json",
|
||||
Response: nil,
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest},
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
|
||||
})).Methods(http.MethodPost).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
pkg/apiserver/signozapiserver/serviceaccount.go (new file, 184 lines)
@@ -0,0 +1,184 @@
|
||||
package signozapiserver
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/serviceaccounttypes"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
func (provider *provider) addServiceAccountRoutes(router *mux.Router) error {
|
||||
if err := router.Handle("/api/v1/service_accounts", handler.New(provider.authZ.AdminAccess(provider.serviceAccountHandler.Create), handler.OpenAPIDef{
|
||||
ID: "CreateServiceAccount",
|
||||
Tags: []string{"serviceaccount"},
|
||||
Summary: "Create service account",
|
||||
Description: "This endpoint creates a service account",
|
||||
Request: new(serviceaccounttypes.PostableServiceAccount),
|
||||
RequestContentType: "",
|
||||
Response: new(types.Identifiable),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusCreated,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusConflict},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodPost).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/service_accounts", handler.New(provider.authZ.AdminAccess(provider.serviceAccountHandler.List), handler.OpenAPIDef{
|
||||
ID: "ListServiceAccounts",
|
||||
Tags: []string{"serviceaccount"},
|
||||
Summary: "List service accounts",
|
||||
Description: "This endpoint lists the service accounts for an organisation",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: make([]*serviceaccounttypes.ServiceAccount, 0),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodGet).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/service_accounts/{id}", handler.New(provider.authZ.AdminAccess(provider.serviceAccountHandler.Get), handler.OpenAPIDef{
|
||||
ID: "GetServiceAccount",
|
||||
Tags: []string{"serviceaccount"},
|
||||
Summary: "Gets a service account",
|
||||
Description: "This endpoint gets an existing service account",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: new(serviceaccounttypes.ServiceAccount),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{http.StatusNotFound},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodGet).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/service_accounts/{id}", handler.New(provider.authZ.AdminAccess(provider.serviceAccountHandler.Update), handler.OpenAPIDef{
|
||||
ID: "UpdateServiceAccount",
|
||||
Tags: []string{"serviceaccount"},
|
||||
Summary: "Updates a service account",
|
||||
Description: "This endpoint updates an existing service account",
|
||||
Request: new(serviceaccounttypes.UpdatableServiceAccount),
|
||||
RequestContentType: "",
|
||||
Response: nil,
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{http.StatusNotFound, http.StatusBadRequest},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodPut).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/service_accounts/{id}/status", handler.New(provider.authZ.AdminAccess(provider.serviceAccountHandler.UpdateStatus), handler.OpenAPIDef{
|
||||
ID: "UpdateServiceAccountStatus",
|
||||
Tags: []string{"serviceaccount"},
|
||||
Summary: "Updates a service account status",
|
||||
Description: "This endpoint updates an existing service account status",
|
||||
Request: new(serviceaccounttypes.UpdatableServiceAccountStatus),
|
||||
RequestContentType: "",
|
||||
Response: nil,
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{http.StatusNotFound, http.StatusBadRequest},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodPut).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/service_accounts/{id}", handler.New(provider.authZ.AdminAccess(provider.serviceAccountHandler.Delete), handler.OpenAPIDef{
|
||||
ID: "DeleteServiceAccount",
|
||||
Tags: []string{"serviceaccount"},
|
||||
Summary: "Deletes a service account",
|
||||
Description: "This endpoint deletes an existing service account",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: nil,
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{http.StatusNotFound},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodDelete).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/service_accounts/{id}/keys", handler.New(provider.authZ.AdminAccess(provider.serviceAccountHandler.CreateFactorAPIKey), handler.OpenAPIDef{
|
||||
ID: "CreateServiceAccountKey",
|
||||
Tags: []string{"serviceaccount"},
|
||||
Summary: "Create a service account key",
|
||||
Description: "This endpoint creates a service account key",
|
||||
Request: new(serviceaccounttypes.PostableFactorAPIKey),
|
||||
RequestContentType: "",
|
||||
Response: new(serviceaccounttypes.GettableFactorAPIKeyWithKey),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusCreated,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusConflict},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodPost).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/service_accounts/{id}/keys", handler.New(provider.authZ.AdminAccess(provider.serviceAccountHandler.ListFactorAPIKey), handler.OpenAPIDef{
|
||||
ID: "ListServiceAccountKeys",
|
||||
Tags: []string{"serviceaccount"},
|
||||
Summary: "List service account keys",
|
||||
Description: "This endpoint lists the service account keys",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: make([]*serviceaccounttypes.FactorAPIKey, 0),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodGet).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/service_accounts/{id}/keys/{fid}", handler.New(provider.authZ.AdminAccess(provider.serviceAccountHandler.UpdateFactorAPIKey), handler.OpenAPIDef{
|
||||
ID: "UpdateServiceAccountKey",
|
||||
Tags: []string{"serviceaccount"},
|
||||
Summary: "Updates a service account key",
|
||||
Description: "This endpoint updates an existing service account key",
|
||||
Request: new(serviceaccounttypes.UpdatableFactorAPIKey),
|
||||
RequestContentType: "",
|
||||
Response: nil,
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusNotFound},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodPut).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/service_accounts/{id}/keys/{fid}", handler.New(provider.authZ.AdminAccess(provider.serviceAccountHandler.RevokeFactorAPIKey), handler.OpenAPIDef{
|
||||
ID: "RevokeServiceAccountKey",
|
||||
Tags: []string{"serviceaccount"},
|
||||
Summary: "Revoke a service account key",
|
||||
Description: "This endpoint revokes an existing service account key",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: nil,
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{http.StatusNotFound},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodDelete).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
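For orientation, a hedged TypeScript sketch of calling two of the service-account routes registered above. The paths, methods, status codes and admin-role requirement follow the route definitions, but BASE_URL, the token handling and the request/response field names are assumptions, not taken from the Go types.

const BASE_URL = 'http://localhost:8080'; // assumed

async function createServiceAccount(
	token: string,
	name: string,
): Promise<{ id: string }> {
	// POST /api/v1/service_accounts requires the admin role per the OpenAPIDef above.
	const res = await fetch(`${BASE_URL}/api/v1/service_accounts`, {
		method: 'POST',
		headers: {
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`,
		},
		body: JSON.stringify({ name }), // field name assumed
	});
	if (!res.ok) {
		throw new Error(`create failed: ${res.status}`); // 400 / 409 per ErrorStatusCodes
	}
	return res.json(); // 201 Created with an identifier
}

async function createServiceAccountKey(
	token: string,
	serviceAccountId: string,
): Promise<unknown> {
	// POST /api/v1/service_accounts/{id}/keys returns the key material once.
	const res = await fetch(
		`${BASE_URL}/api/v1/service_accounts/${serviceAccountId}/keys`,
		{
			method: 'POST',
			headers: {
				'Content-Type': 'application/json',
				Authorization: `Bearer ${token}`,
			},
			body: JSON.stringify({ name: 'ci-key' }), // payload shape assumed
		},
	);
	if (!res.ok) {
		throw new Error(`key create failed: ${res.status}`);
	}
	return res.json();
}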
@@ -62,14 +62,17 @@ type AuthZ interface {
|
||||
// Lists all the roles for the organization filtered by name
|
||||
ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*roletypes.Role, error)
|
||||
|
||||
// Lists all the roles for the organization filtered by ids
|
||||
ListByOrgIDAndIDs(context.Context, valuer.UUID, []valuer.UUID) ([]*roletypes.Role, error)
|
||||
|
||||
// Grants a role to the subject based on role name.
|
||||
Grant(context.Context, valuer.UUID, string, string) error
|
||||
Grant(context.Context, valuer.UUID, []string, string) error
|
||||
|
||||
// Revokes a granted role from the subject based on role name.
|
||||
Revoke(context.Context, valuer.UUID, string, string) error
|
||||
Revoke(context.Context, valuer.UUID, []string, string) error
|
||||
|
||||
// Changes the granted role for the subject based on role name.
|
||||
ModifyGrant(context.Context, valuer.UUID, string, string, string) error
|
||||
ModifyGrant(context.Context, valuer.UUID, []string, []string, string) error
|
||||
|
||||
// Bootstrap the managed roles.
|
||||
CreateManagedRoles(context.Context, valuer.UUID, []*roletypes.Role) error
|
||||
|
||||
@@ -96,6 +96,39 @@ func (store *store) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID,
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(roles) != len(names) {
|
||||
return nil, store.sqlstore.WrapNotFoundErrf(
|
||||
nil,
|
||||
roletypes.ErrCodeRoleNotFound,
|
||||
"not all roles found for the provided names: %v", names,
|
||||
)
|
||||
}
|
||||
|
||||
return roles, nil
|
||||
}
|
||||
|
||||
func (store *store) ListByOrgIDAndIDs(ctx context.Context, orgID valuer.UUID, ids []valuer.UUID) ([]*roletypes.StorableRole, error) {
|
||||
roles := make([]*roletypes.StorableRole, 0)
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewSelect().
|
||||
Model(&roles).
|
||||
Where("org_id = ?", orgID).
|
||||
Where("id IN (?)", bun.In(ids)).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(roles) != len(ids) {
|
||||
return nil, store.sqlstore.WrapNotFoundErrf(
|
||||
nil,
|
||||
roletypes.ErrCodeRoleNotFound,
|
||||
"not all roles found for the provided ids: %v", ids,
|
||||
)
|
||||
}
|
||||
|
||||
return roles, nil
|
||||
}
|
||||
|
||||
|
||||
@@ -114,28 +114,46 @@ func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.
|
||||
return roles, nil
|
||||
}
|
||||
|
||||
func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
|
||||
func (provider *provider) ListByOrgIDAndIDs(ctx context.Context, orgID valuer.UUID, ids []valuer.UUID) ([]*roletypes.Role, error) {
|
||||
storableRoles, err := provider.store.ListByOrgIDAndIDs(ctx, orgID, ids)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
roles := make([]*roletypes.Role, len(storableRoles))
|
||||
for idx, storable := range storableRoles {
|
||||
roles[idx] = roletypes.NewRoleFromStorableRole(storable)
|
||||
}
|
||||
|
||||
return roles, nil
|
||||
}
|
||||
|
||||
func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, names []string, subject string) error {
|
||||
selectors := make([]authtypes.Selector, len(names))
|
||||
for idx, name := range names {
|
||||
selectors[idx] = authtypes.MustNewSelector(authtypes.TypeRole, name)
|
||||
}
|
||||
|
||||
tuples, err := authtypes.TypeableRole.Tuples(
|
||||
subject,
|
||||
authtypes.RelationAssignee,
|
||||
[]authtypes.Selector{
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, name),
|
||||
},
|
||||
selectors,
|
||||
orgID,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return provider.Write(ctx, tuples, nil)
|
||||
}
|
||||
|
||||
func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
|
||||
err := provider.Revoke(ctx, orgID, existingRoleName, subject)
|
||||
func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleNames []string, updatedRoleNames []string, subject string) error {
|
||||
err := provider.Revoke(ctx, orgID, existingRoleNames, subject)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = provider.Grant(ctx, orgID, updatedRoleName, subject)
|
||||
err = provider.Grant(ctx, orgID, updatedRoleNames, subject)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -143,13 +161,16 @@ func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, ex
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
|
||||
func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, names []string, subject string) error {
|
||||
selectors := make([]authtypes.Selector, len(names))
|
||||
for idx, name := range names {
|
||||
selectors[idx] = authtypes.MustNewSelector(authtypes.TypeRole, name)
|
||||
}
|
||||
|
||||
tuples, err := authtypes.TypeableRole.Tuples(
|
||||
subject,
|
||||
authtypes.RelationAssignee,
|
||||
[]authtypes.Selector{
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, name),
|
||||
},
|
||||
selectors,
|
||||
orgID,
|
||||
)
|
||||
if err != nil {
|
||||
@@ -178,7 +199,7 @@ func (provider *provider) CreateManagedRoles(ctx context.Context, _ valuer.UUID,
|
||||
}
|
||||
|
||||
func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
|
||||
return provider.Grant(ctx, orgID, roletypes.SigNozAdminRoleName, authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil))
|
||||
return provider.Grant(ctx, orgID, []string{roletypes.SigNozAdminRoleName}, authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil))
|
||||
}
|
||||
|
||||
func (setter *provider) Create(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
|
||||
|
||||
pkg/modules/cloudintegration/cloudintegration.go (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
package cloudintegration
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types/cloudintegrationtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type Module interface {
|
||||
GetName() cloudintegrationtypes.CloudProviderType
|
||||
|
||||
	// AgentCheckIn is called by the agent to heartbeat and to get the latest config in response.
|
||||
AgentCheckIn(ctx context.Context, req *cloudintegrationtypes.PostableAgentCheckInPayload) (any, error)
|
||||
|
||||
GenerateConnectionParams(ctx context.Context) (*cloudintegrationtypes.GettableCloudIntegrationConnectionParams, error)
|
||||
	// GenerateConnectionArtifact generates cloud-provider-specific connection information; the client side handles how this information is shown
|
||||
GenerateConnectionArtifact(ctx context.Context, req *cloudintegrationtypes.PostableConnectionArtifact) (any, error)
|
||||
// GetAccountStatus returns agent connection status for a cloud integration account
|
||||
GetAccountStatus(ctx context.Context, orgID, accountID string) (*cloudintegrationtypes.GettableAccountStatus, error)
|
||||
	// ListConnectedAccounts lists accounts where the agent is connected
|
||||
ListConnectedAccounts(ctx context.Context, orgID string) (*cloudintegrationtypes.GettableConnectedAccountsList, error)
|
||||
|
||||
	// ListServices returns the list of services for a cloud provider attached to the accountID. This only returns a summary
|
||||
ListServices(ctx context.Context, orgID string, accountID *string) (any, error) // returns either GettableAWSServices or GettableAzureServices
|
||||
// GetServiceDetails returns service definition details for a serviceId. This returns config and other details required to show in service details page on client.
|
||||
GetServiceDetails(ctx context.Context, req *cloudintegrationtypes.GetServiceDetailsReq) (any, error)
|
||||
|
||||
	// GetDashboard returns the dashboard JSON for a given cloud integration service dashboard.
	// This only returns the dashboard when the account is connected and the service is enabled.
|
||||
GetDashboard(ctx context.Context, id string, orgID valuer.UUID) (*dashboardtypes.Dashboard, error)
|
||||
	// GetAvailableDashboards returns the list of available dashboards across all connected cloud integration accounts in the org.
	// This list gets added to the dashboard list page.
|
||||
GetAvailableDashboards(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error)
|
||||
|
||||
// UpdateAccountConfig updates cloud integration account config
|
||||
UpdateAccountConfig(ctx context.Context, orgId valuer.UUID, accountId string, config []byte) (any, error)
|
||||
// UpdateServiceConfig updates cloud integration service config
|
||||
UpdateServiceConfig(ctx context.Context, serviceId string, orgID valuer.UUID, config []byte) (any, error)
|
||||
|
||||
// DisconnectAccount soft deletes/removes a cloud integration account.
|
||||
DisconnectAccount(ctx context.Context, orgID, accountID string) (*cloudintegrationtypes.CloudIntegration, error)
|
||||
}
|
||||
|
||||
type Handler interface {
|
||||
AgentCheckIn(http.ResponseWriter, *http.Request)
|
||||
|
||||
GenerateConnectionParams(http.ResponseWriter, *http.Request)
|
||||
GenerateConnectionArtifact(http.ResponseWriter, *http.Request)
|
||||
|
||||
ListConnectedAccounts(http.ResponseWriter, *http.Request)
|
||||
GetAccountStatus(http.ResponseWriter, *http.Request)
|
||||
ListServices(http.ResponseWriter, *http.Request)
|
||||
GetServiceDetails(http.ResponseWriter, *http.Request)
|
||||
|
||||
UpdateAccountConfig(http.ResponseWriter, *http.Request)
|
||||
UpdateServiceConfig(http.ResponseWriter, *http.Request)
|
||||
|
||||
DisconnectAccount(http.ResponseWriter, *http.Request)
|
||||
}
|
||||
pkg/modules/cloudintegration/implsqlstore/accounts.go (new file, 193 lines)
@@ -0,0 +1,193 @@
|
||||
package implcloudintegration
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
cloudintegrationtypes "github.com/SigNoz/signoz/pkg/types/cloudintegrationtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
var ErrCodeCloudIntegrationAccountNotFound = errors.MustNewCode("cloud_integration_account_not_found")
|
||||
|
||||
// cloudProviderAccountsSQLRepository is a SQL-backed implementation of CloudIntegrationAccountStore.
|
||||
type cloudProviderAccountsSQLRepository struct {
|
||||
store sqlstore.SQLStore
|
||||
}
|
||||
|
||||
// NewSQLCloudIntegrationAccountStore constructs a SQL-backed CloudIntegrationAccountStore.
|
||||
func NewSQLCloudIntegrationAccountStore(store sqlstore.SQLStore) cloudintegrationtypes.CloudIntegrationAccountStore {
|
||||
return &cloudProviderAccountsSQLRepository{store: store}
|
||||
}
|
||||
|
||||
// -----------------------------
|
||||
// Account store implementation
|
||||
// -----------------------------
|
||||
|
||||
func (r *cloudProviderAccountsSQLRepository) ListConnected(
|
||||
ctx context.Context, orgId string, cloudProvider string,
|
||||
) ([]cloudintegrationtypes.CloudIntegration, error) {
|
||||
accounts := []cloudintegrationtypes.CloudIntegration{}
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&accounts).
|
||||
Where("org_id = ?", orgId).
|
||||
Where("provider = ?", cloudProvider).
|
||||
Where("removed_at is NULL").
|
||||
Where("account_id is not NULL").
|
||||
Where("last_agent_report is not NULL").
|
||||
Order("created_at").
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
slog.ErrorContext(ctx, "error querying connected cloud accounts", "error", err)
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "could not query connected cloud accounts")
|
||||
}
|
||||
|
||||
return accounts, nil
|
||||
}
|
||||
|
||||
func (r *cloudProviderAccountsSQLRepository) Get(
|
||||
ctx context.Context, orgId string, provider string, id string,
|
||||
) (*cloudintegrationtypes.CloudIntegration, error) {
|
||||
var result cloudintegrationtypes.CloudIntegration
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&result).
|
||||
Where("org_id = ?", orgId).
|
||||
Where("provider = ?", provider).
|
||||
Where("id = ?", id).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
return nil, errors.WrapNotFoundf(
|
||||
err,
|
||||
ErrCodeCloudIntegrationAccountNotFound,
|
||||
"couldn't find account with Id %s", id,
|
||||
)
|
||||
}
|
||||
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud provider account")
|
||||
}
|
||||
|
||||
return &result, nil
|
||||
}
|
||||
|
||||
func (r *cloudProviderAccountsSQLRepository) GetConnectedCloudAccount(
|
||||
ctx context.Context, orgId string, provider string, accountId string,
|
||||
) (*cloudintegrationtypes.CloudIntegration, error) {
|
||||
var result cloudintegrationtypes.CloudIntegration
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&result).
|
||||
Where("org_id = ?", orgId).
|
||||
Where("provider = ?", provider).
|
||||
Where("account_id = ?", accountId).
|
||||
Where("last_agent_report is not NULL").
|
||||
Where("removed_at is NULL").
|
||||
Scan(ctx)
|
||||
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
return nil, errors.WrapNotFoundf(err, ErrCodeCloudIntegrationAccountNotFound, "couldn't find connected cloud account %s", accountId)
|
||||
} else if err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud provider account")
|
||||
}
|
||||
|
||||
return &result, nil
|
||||
}
|
||||
|
||||
func (r *cloudProviderAccountsSQLRepository) Upsert(
|
||||
ctx context.Context,
|
||||
orgId string,
|
||||
provider string,
|
||||
id *string,
|
||||
config []byte,
|
||||
accountId *string,
|
||||
agentReport *cloudintegrationtypes.AgentReport,
|
||||
removedAt *time.Time,
|
||||
) (*cloudintegrationtypes.CloudIntegration, error) {
|
||||
// Insert
|
||||
if id == nil {
|
||||
temp := valuer.GenerateUUID().StringValue()
|
||||
id = &temp
|
||||
}
|
||||
|
||||
// Prepare clause for setting values in `on conflict do update`
|
||||
onConflictSetStmts := []string{}
|
||||
setColStatement := func(col string) string {
|
||||
return fmt.Sprintf("%s=excluded.%s", col, col)
|
||||
}
|
||||
|
||||
if config != nil {
|
||||
onConflictSetStmts = append(
|
||||
onConflictSetStmts, setColStatement("config"),
|
||||
)
|
||||
}
|
||||
|
||||
if accountId != nil {
|
||||
onConflictSetStmts = append(
|
||||
onConflictSetStmts, setColStatement("account_id"),
|
||||
)
|
||||
}
|
||||
|
||||
if agentReport != nil {
|
||||
onConflictSetStmts = append(
|
||||
onConflictSetStmts, setColStatement("last_agent_report"),
|
||||
)
|
||||
}
|
||||
|
||||
if removedAt != nil {
|
||||
onConflictSetStmts = append(
|
||||
onConflictSetStmts, setColStatement("removed_at"),
|
||||
)
|
||||
}
|
||||
|
||||
// set updated_at to current timestamp if it's an upsert
|
||||
onConflictSetStmts = append(
|
||||
onConflictSetStmts, setColStatement("updated_at"),
|
||||
)
|
||||
|
||||
onConflictClause := ""
|
||||
if len(onConflictSetStmts) > 0 {
|
||||
onConflictClause = fmt.Sprintf(
|
||||
"conflict(id, provider, org_id) do update SET\n%s",
|
||||
strings.Join(onConflictSetStmts, ",\n"),
|
||||
)
|
||||
}
|
||||
|
||||
integration := cloudintegrationtypes.CloudIntegration{
|
||||
OrgID: orgId,
|
||||
Provider: provider,
|
||||
Identifiable: types.Identifiable{ID: valuer.MustNewUUID(*id)},
|
||||
TimeAuditable: types.TimeAuditable{
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
},
|
||||
Config: string(config),
|
||||
AccountID: accountId,
|
||||
LastAgentReport: agentReport,
|
||||
RemovedAt: removedAt,
|
||||
}
|
||||
|
||||
_, err := r.store.BunDB().NewInsert().
|
||||
Model(&integration).
|
||||
On(onConflictClause).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't upsert cloud integration account")
|
||||
}
|
||||
|
||||
upsertedAccount, err := r.Get(ctx, orgId, provider, *id)
|
||||
if err != nil {
|
||||
slog.ErrorContext(ctx, "error upserting cloud integration account", "error", err)
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't get upserted cloud integration account")
|
||||
}
|
||||
|
||||
return upsertedAccount, nil
|
||||
}
|
||||
pkg/modules/cloudintegration/implsqlstore/serviceconfig.go (new file, 133 lines)
@@ -0,0 +1,133 @@
|
||||
package implcloudintegration
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
cloudintegrationtypes "github.com/SigNoz/signoz/pkg/types/cloudintegrationtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
var ErrCodeServiceConfigNotFound = errors.MustNewCode("service_config_not_found")
|
||||
|
||||
// serviceConfigSQLRepository is a SQL-backed implementation of CloudIntegrationServiceStore.
|
||||
type serviceConfigSQLRepository struct {
|
||||
store sqlstore.SQLStore
|
||||
}
|
||||
|
||||
// NewSQLCloudIntegrationServiceStore constructs a SQL-backed CloudIntegrationServiceStore.
|
||||
func NewSQLCloudIntegrationServiceStore(store sqlstore.SQLStore) cloudintegrationtypes.CloudIntegrationServiceStore {
|
||||
return &serviceConfigSQLRepository{store: store}
|
||||
}
|
||||
|
||||
// -----------------------------
|
||||
// Service config store implementation
|
||||
// -----------------------------
|
||||
|
||||
func (r *serviceConfigSQLRepository) Get(
|
||||
ctx context.Context,
|
||||
orgID string,
|
||||
cloudAccountId string,
|
||||
serviceType string,
|
||||
) ([]byte, error) {
|
||||
var result cloudintegrationtypes.CloudIntegrationService
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&result).
|
||||
Join("JOIN cloud_integration ci ON ci.id = cis.cloud_integration_id").
|
||||
Where("ci.org_id = ?", orgID).
|
||||
Where("ci.id = ?", cloudAccountId).
|
||||
Where("cis.type = ?", serviceType).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
return nil, errors.WrapNotFoundf(err, ErrCodeServiceConfigNotFound, "couldn't find config for cloud account %s", cloudAccountId)
|
||||
}
|
||||
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud service config")
|
||||
}
|
||||
|
||||
return []byte(result.Config), nil
|
||||
}
|
||||
|
||||
func (r *serviceConfigSQLRepository) Upsert(
|
||||
ctx context.Context,
|
||||
orgID string,
|
||||
cloudProvider string,
|
||||
cloudAccountId string,
|
||||
serviceId string,
|
||||
config []byte,
|
||||
) ([]byte, error) {
|
||||
// get cloud integration id from account id
|
||||
// if the account is not connected, we don't need to upsert the config
|
||||
var cloudIntegrationId string
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model((*cloudintegrationtypes.CloudIntegration)(nil)).
|
||||
Column("id").
|
||||
Where("provider = ?", cloudProvider).
|
||||
Where("account_id = ?", cloudAccountId).
|
||||
Where("org_id = ?", orgID).
|
||||
Where("removed_at is NULL").
|
||||
Where("last_agent_report is not NULL").
|
||||
Scan(ctx, &cloudIntegrationId)
|
||||
if err != nil {
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
return nil, errors.WrapNotFoundf(
|
||||
err,
|
||||
ErrCodeCloudIntegrationAccountNotFound,
|
||||
"couldn't find active cloud integration account",
|
||||
)
|
||||
}
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud integration id")
|
||||
}
|
||||
|
||||
serviceConfig := cloudintegrationtypes.CloudIntegrationService{
|
||||
Identifiable: types.Identifiable{ID: valuer.GenerateUUID()},
|
||||
TimeAuditable: types.TimeAuditable{
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
},
|
||||
Config: string(config),
|
||||
Type: serviceId,
|
||||
CloudIntegrationID: cloudIntegrationId,
|
||||
}
|
||||
_, err = r.store.BunDB().NewInsert().
|
||||
Model(&serviceConfig).
|
||||
On("conflict(cloud_integration_id, type) do update set config=excluded.config, updated_at=excluded.updated_at").
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't upsert cloud service config")
|
||||
}
|
||||
|
||||
return config, nil
|
||||
}
|
||||
|
||||
func (r *serviceConfigSQLRepository) GetAllForAccount(
|
||||
ctx context.Context,
|
||||
orgID string,
|
||||
cloudAccountId string,
|
||||
) (map[string][]byte, error) {
|
||||
var serviceConfigs []cloudintegrationtypes.CloudIntegrationService
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&serviceConfigs).
|
||||
Join("JOIN cloud_integration ci ON ci.id = cis.cloud_integration_id").
|
||||
Where("ci.id = ?", cloudAccountId).
|
||||
Where("ci.org_id = ?", orgID).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query service configs from db")
|
||||
}
|
||||
|
||||
result := make(map[string][]byte)
|
||||
|
||||
for _, r := range serviceConfigs {
|
||||
result[r.Type] = []byte(r.Config)
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
@@ -6,18 +6,18 @@ import (
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"slices"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/binding"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/exporttypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
@@ -31,52 +31,129 @@ func NewHandler(module rawdataexport.Module) rawdataexport.Handler {
|
||||
return &handler{module: module}
|
||||
}
|
||||
|
||||
// ExportRawData handles data export requests.
|
||||
//
|
||||
// API Documentation:
|
||||
// Endpoint: GET /api/v1/export_raw_data
|
||||
//
|
||||
// Query Parameters:
|
||||
//
|
||||
// - source (optional): Type of data to export ["logs" (default), "metrics", "traces"]
|
||||
// Note: Currently only "logs" is fully supported
|
||||
//
|
||||
// - format (optional): Output format ["csv" (default), "jsonl"]
|
||||
//
|
||||
// - start (required): Start time for query (Unix timestamp in nanoseconds)
|
||||
//
|
||||
// - end (required): End time for query (Unix timestamp in nanoseconds)
|
||||
//
|
||||
// - limit (optional): Maximum number of rows to export
|
||||
// Constraints: Must be positive and cannot exceed MAX_EXPORT_ROW_COUNT_LIMIT
|
||||
//
|
||||
// - filter (optional): Filter expression to apply to the query
|
||||
//
|
||||
// - columns (optional): Specific columns to include in export
|
||||
// Default: all columns are returned
|
||||
// Format: ["context.field:type", "context.field", "field"]
|
||||
//
|
||||
// - order_by (optional): Sorting specification ["column:direction" or "context.field:type:direction"]
|
||||
// Direction: "asc" or "desc"
|
||||
// Default: ["timestamp:desc", "id:desc"]
|
||||
//
|
||||
// Response Headers:
|
||||
// - Content-Type: "text/csv" or "application/x-ndjson"
|
||||
// - Content-Encoding: "gzip" (handled by HTTP middleware)
|
||||
// - Content-Disposition: "attachment; filename=\"data_exported.[format]\""
|
||||
// - Cache-Control: "no-cache"
|
||||
// - Vary: "Accept-Encoding"
|
||||
// - Transfer-Encoding: "chunked"
|
||||
// - Trailers: X-Response-Complete
|
||||
//
|
||||
// Response Format:
|
||||
//
|
||||
// CSV: Headers in first row, data in subsequent rows
|
||||
// JSONL: One JSON object per line
|
||||
//
|
||||
// Example Usage:
|
||||
//
|
||||
// Basic CSV export:
|
||||
// GET /api/v1/export_raw_data?start=1693612800000000000&end=1693699199000000000
|
||||
//
|
||||
// Export with columns and format:
|
||||
// GET /api/v1/export_raw_data?start=1693612800000000000&end=1693699199000000000&format=jsonl
|
||||
// &columns=timestamp&columns=severity&columns=message
|
||||
//
|
||||
// Export with filter and ordering:
|
||||
// GET /api/v1/export_raw_data?start=1693612800000000000&end=1693699199000000000
|
||||
// &filter=severity="error"&order_by=timestamp:desc&limit=1000
|
||||
func (handler *handler) ExportRawData(rw http.ResponseWriter, r *http.Request) {
|
||||
var queryRangeRequest qbtypes.QueryRangeRequest
|
||||
var format string
|
||||
source, err := getExportQuerySource(r.URL.Query())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
switch r.Method {
|
||||
case http.MethodGet:
|
||||
var params exporttypes.ExportRawDataQueryParams
|
||||
if err := binding.Query.BindQuery(r.URL.Query(), ¶ms); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
params.Normalize()
|
||||
if err := params.Validate(); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
format = params.Format
|
||||
queryRangeRequest = buildQueryRangeRequest(¶ms)
|
||||
case http.MethodPost:
|
||||
var formatParam exporttypes.ExportRawDataFormatQueryParam
|
||||
if err := binding.Query.BindQuery(r.URL.Query(), &formatParam); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
format = formatParam.Format
|
||||
if err := json.NewDecoder(r.Body).Decode(&queryRangeRequest); err != nil {
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid request body: %v", err))
|
||||
return
|
||||
}
|
||||
switch source {
|
||||
case "logs":
|
||||
handler.exportLogs(rw, r)
|
||||
case "traces":
|
||||
handler.exportTraces(rw, r)
|
||||
case "metrics":
|
||||
handler.exportMetrics(rw, r)
|
||||
default:
|
||||
render.Error(rw, errors.Newf(errors.TypeMethodNotAllowed, errors.CodeMethodNotAllowed, "method not allowed, only GET/POST supported"))
|
||||
return
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid source: must be logs"))
|
||||
}
|
||||
}
|
||||
|
||||
if err := validateSpecForExport(&queryRangeRequest); err != nil {
|
||||
func (handler *handler) exportMetrics(rw http.ResponseWriter, r *http.Request) {
|
||||
render.Error(rw, errors.Newf(errors.TypeUnsupported, errors.CodeUnsupported, "metrics export is not yet supported"))
|
||||
}
|
||||
|
||||
func (handler *handler) exportTraces(rw http.ResponseWriter, r *http.Request) {
|
||||
render.Error(rw, errors.Newf(errors.TypeUnsupported, errors.CodeUnsupported, "traces export is not yet supported"))
|
||||
}
|
||||
|
||||
func (handler *handler) exportLogs(rw http.ResponseWriter, r *http.Request) {
|
||||
// Set up response headers
|
||||
rw.Header().Set("Cache-Control", "no-cache")
|
||||
rw.Header().Set("Vary", "Accept-Encoding") // Indicate that response varies based on Accept-Encoding
|
||||
rw.Header().Set("Access-Control-Expose-Headers", "Content-Disposition, X-Response-Complete")
|
||||
rw.Header().Set("Trailer", "X-Response-Complete")
|
||||
rw.Header().Set("Transfer-Encoding", "chunked")
|
||||
|
||||
queryParams := r.URL.Query()
|
||||
|
||||
startTime, endTime, err := getExportQueryTimeRange(queryParams)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := validateAndApplyDefaultExportLimits(queryRangeRequest.CompositeQuery.Queries); err != nil {
|
||||
limit, err := getExportQueryLimit(queryParams)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
// Use default OrderBy if not specified
|
||||
queryRangeRequest.UseDefaultOrderBy()
|
||||
format, err := getExportQueryFormat(queryParams)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
// Set appropriate content type and filename
|
||||
filename := fmt.Sprintf("data_exported_%s.%s", time.Now().Format("2006-01-02_150405"), format)
|
||||
rw.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename))
|
||||
|
||||
filterExpression := queryParams.Get("filter")
|
||||
|
||||
orderByExpression, err := getExportQueryOrderBy(queryParams)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
columns := getExportQueryColumns(queryParams)
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
@@ -90,134 +167,70 @@ func (handler *handler) ExportRawData(rw http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
setExportResponseHeaders(rw, format)
|
||||
queryRangeRequest := qbtypes.QueryRangeRequest{
|
||||
Start: startTime,
|
||||
End: endTime,
|
||||
RequestType: qbtypes.RequestTypeRaw,
|
||||
CompositeQuery: qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: nil,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
spec := qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Name: "raw",
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: filterExpression,
|
||||
},
|
||||
Limit: limit,
|
||||
Order: orderByExpression,
|
||||
}
|
||||
|
||||
spec.SelectFields = columns
|
||||
|
||||
queryRangeRequest.CompositeQuery.Queries[0].Spec = spec
|
||||
|
||||
// Closing this channel signals the export module to stop sending data
|
||||
doneChan := make(chan any)
|
||||
defer close(doneChan)
|
||||
rowChan, errChan := handler.module.ExportRawData(r.Context(), orgID, &queryRangeRequest, doneChan)
|
||||
|
||||
isComplete, err := handler.executeExport(rowChan, errChan, format, rw)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
rw.Header().Set("X-Response-Complete", strconv.FormatBool(isComplete))
|
||||
}
|
||||
var isComplete bool
|
||||
|
||||
// validateSpecForExport validates query specs
|
||||
func validateSpecForExport(req *qbtypes.QueryRangeRequest) error {
|
||||
|
||||
queries := req.CompositeQuery.Queries
|
||||
|
||||
// If the trace operator query is not present, and there are multiple queries, return an error
|
||||
if req.TraceOperatorQueryIndex() == -1 && len(queries) > 1 {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "multiple queries not allowed without a trace operator query")
|
||||
}
|
||||
|
||||
for idx := range queries {
|
||||
switch spec := queries[idx].Spec.(type) {
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
|
||||
qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
|
||||
qbtypes.QueryBuilderTraceOperator:
|
||||
// Supported spec types
|
||||
default:
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported query at index %d type: %T", idx, spec)
|
||||
}
|
||||
}
|
||||
|
||||
err := req.Validate(qbtypes.WithSkipLimitValidation())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateAndApplyDefaultExportLimits(queries []qbtypes.QueryEnvelope) error {
|
||||
for idx := range queries {
|
||||
limit := queries[idx].GetLimit()
|
||||
if limit == 0 {
|
||||
limit = DefaultExportRowCountLimit
|
||||
} else if limit < 0 {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be positive")
|
||||
} else if limit > MaxExportRowCountLimit {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit cannot be more than %d", MaxExportRowCountLimit)
|
||||
}
|
||||
queries[idx].SetLimit(limit)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// buildQueryEnvelope creates a QueryEnvelope with a QueryBuilderQuery for the given aggregation type.
|
||||
func buildQueryEnvelope[T any](signal telemetrytypes.Signal, filter *qbtypes.Filter, limit int, order []qbtypes.OrderBy, selectFields []telemetrytypes.TelemetryFieldKey) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[T]{
|
||||
Signal: signal,
|
||||
Filter: filter,
|
||||
Limit: limit,
|
||||
Order: order,
|
||||
SelectFields: selectFields,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// buildQueryRangeRequest builds a QueryRangeRequest from already-bound and validated GET query params.
|
||||
func buildQueryRangeRequest(params *exporttypes.ExportRawDataQueryParams) qbtypes.QueryRangeRequest {
|
||||
var query qbtypes.QueryEnvelope
|
||||
switch params.Signal {
|
||||
case telemetrytypes.SignalLogs:
|
||||
query = buildQueryEnvelope[qbtypes.LogAggregation](params.Signal, ¶ms.Filter, params.Limit, params.Order, params.SelectFields)
|
||||
case telemetrytypes.SignalTraces:
|
||||
query = buildQueryEnvelope[qbtypes.TraceAggregation](params.Signal, ¶ms.Filter, params.Limit, params.Order, params.SelectFields)
|
||||
}
|
||||
|
||||
return qbtypes.QueryRangeRequest{
|
||||
Start: params.Start,
|
||||
End: params.End,
|
||||
RequestType: qbtypes.RequestTypeRaw,
|
||||
CompositeQuery: qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{query},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// setExportResponseHeaders sets common HTTP headers for export responses.
|
||||
func setExportResponseHeaders(rw http.ResponseWriter, format string) {
|
||||
rw.Header().Set("Cache-Control", "no-cache")
|
||||
rw.Header().Set("Vary", "Accept-Encoding")
|
||||
rw.Header().Set("Access-Control-Expose-Headers", "Content-Disposition, X-Response-Complete")
|
||||
rw.Header().Set("Trailer", "X-Response-Complete")
|
||||
rw.Header().Set("Transfer-Encoding", "chunked")
|
||||
filename := fmt.Sprintf("data_exported_%s.%s", time.Now().Format("2006-01-02_150405"), format)
|
||||
rw.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename))
|
||||
}
|
||||
|
||||
// executeExport streams data from rowChan to the response writer in the specified format.
|
||||
func (handler *handler) executeExport(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, format string, rw http.ResponseWriter) (bool, error) {
|
||||
switch format {
|
||||
case "csv", "":
|
||||
rw.Header().Set("Content-Type", "text/csv")
|
||||
csvWriter := csv.NewWriter(rw)
|
||||
isComplete, err := handler.exportRawDataCSV(rowChan, errChan, csvWriter)
|
||||
isComplete, err = handler.exportLogsCSV(rowChan, errChan, csvWriter)
|
||||
if err != nil {
|
||||
return false, err
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
csvWriter.Flush()
|
||||
return isComplete, nil
|
||||
case "jsonl":
|
||||
rw.Header().Set("Content-Type", "application/x-ndjson")
|
||||
return handler.exportRawDataJSONL(rowChan, errChan, rw)
|
||||
isComplete, err = handler.exportLogsJSONL(rowChan, errChan, rw)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
default:
|
||||
return false, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid format: must be csv or jsonl")
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid format: must be csv or jsonl"))
|
||||
return
|
||||
}
|
||||
|
||||
rw.Header().Set("X-Response-Complete", strconv.FormatBool(isComplete))
|
||||
}
|
||||
|
||||
// exportRawDataCSV is a generic CSV export function that works with any raw data (logs, traces, etc.)
|
||||
func (handler *handler) exportRawDataCSV(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, csvWriter *csv.Writer) (bool, error) {
|
||||
|
||||
func (handler *handler) exportLogsCSV(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, csvWriter *csv.Writer) (bool, error) {
|
||||
var header []string
|
||||
headerToIndexMapping := make(map[string]int)
|
||||
|
||||
headerToIndexMapping := make(map[string]int, len(header))
|
||||
|
||||
totalBytes := uint64(0)
|
||||
for {
|
||||
@@ -255,8 +268,8 @@ func (handler *handler) exportRawDataCSV(rowChan <-chan *qbtypes.RawRow, errChan
|
||||
}
|
||||
}
|
||||
|
||||
// exportRawDataJSONL is a generic JSONL export function that works with any raw data (logs, traces, etc.)
|
||||
func (handler *handler) exportRawDataJSONL(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, writer io.Writer) (bool, error) {
|
||||
func (handler *handler) exportLogsJSONL(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, writer io.Writer) (bool, error) {
|
||||
|
||||
totalBytes := uint64(0)
|
||||
for {
|
||||
select {
|
||||
@@ -264,11 +277,9 @@ func (handler *handler) exportRawDataJSONL(rowChan <-chan *qbtypes.RawRow, errCh
|
||||
if !ok {
|
||||
return true, nil
|
||||
}
|
||||
jsonBytes, err := json.Marshal(row.Data)
|
||||
if err != nil {
|
||||
return false, errors.NewUnexpectedf(errors.CodeInternal, "error marshaling JSON: %s", err)
|
||||
}
|
||||
totalBytes += uint64(len(jsonBytes)) + 1
|
||||
// Handle JSON format (JSONL - one object per line)
|
||||
jsonBytes, _ := json.Marshal(row.Data)
|
||||
totalBytes += uint64(len(jsonBytes)) + 1 // +1 for newline
|
||||
|
||||
if _, err := writer.Write(jsonBytes); err != nil {
|
||||
return false, errors.NewUnexpectedf(errors.CodeInternal, "error writing JSON: %s", err)
|
||||
@@ -288,33 +299,74 @@ func (handler *handler) exportRawDataJSONL(rowChan <-chan *qbtypes.RawRow, errCh
|
||||
}
|
||||
}
|
||||
|
||||
// priorityColumns defines the columns that should appear first in the CSV output, in order.
var priorityColumns = []string{"timestamp", "id"}

func getExportQuerySource(queryParams url.Values) (string, error) {
|
||||
switch queryParams.Get("source") {
|
||||
case "logs", "":
|
||||
return "logs", nil
|
||||
case "metrics":
|
||||
return "metrics", errors.NewInvalidInputf(errors.CodeInvalidInput, "metrics export not yet supported")
|
||||
case "traces":
|
||||
return "traces", errors.NewInvalidInputf(errors.CodeInvalidInput, "traces export not yet supported")
|
||||
default:
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid source: must be logs, metrics or traces")
|
||||
}
|
||||
}
|
||||
|
||||
func getExportQueryFormat(queryParams url.Values) (string, error) {
|
||||
switch queryParams.Get("format") {
|
||||
case "csv", "":
|
||||
return "csv", nil
|
||||
case "jsonl":
|
||||
return "jsonl", nil
|
||||
default:
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid format: must be csv or jsonl")
|
||||
}
|
||||
}
|
||||
|
||||
func getExportQueryLimit(queryParams url.Values) (int, error) {
|
||||
|
||||
limitStr := queryParams.Get("limit")
|
||||
if limitStr == "" {
|
||||
return DefaultExportRowCountLimit, nil
|
||||
} else {
|
||||
limit, err := strconv.Atoi(limitStr)
|
||||
if err != nil {
|
||||
return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid limit format: %s", err.Error())
|
||||
}
|
||||
if limit <= 0 {
|
||||
return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be positive")
|
||||
}
|
||||
if limit > MaxExportRowCountLimit {
|
||||
return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "limit cannot be more than %d", MaxExportRowCountLimit)
|
||||
}
|
||||
return limit, nil
|
||||
}
|
||||
}
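
// Illustrative sketch (not part of the handler): how the limit parameter
// resolves under the validation above, mirroring the unit tests for
// getExportQueryLimit. The concrete values are examples only.
func exampleExportQueryLimit() {
	limit, _ := getExportQueryLimit(url.Values{})
	fmt.Println(limit == DefaultExportRowCountLimit) // true: a missing limit falls back to the default

	limit, _ = getExportQueryLimit(url.Values{"limit": {"5000"}})
	fmt.Println(limit) // 5000: kept as long as it does not exceed MaxExportRowCountLimit

	_, err := getExportQueryLimit(url.Values{"limit": {"-100"}})
	fmt.Println(err != nil) // true: non-positive limits are rejected
}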
|
||||
|
||||
func getExportQueryTimeRange(queryParams url.Values) (uint64, uint64, error) {
|
||||
|
||||
startTimeStr := queryParams.Get("start")
|
||||
endTimeStr := queryParams.Get("end")
|
||||
|
||||
if startTimeStr == "" || endTimeStr == "" {
|
||||
return 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "start and end time are required")
|
||||
}
|
||||
startTime, err := strconv.ParseUint(startTimeStr, 10, 64)
|
||||
if err != nil {
|
||||
return 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid start time format: %s", err.Error())
|
||||
}
|
||||
endTime, err := strconv.ParseUint(endTimeStr, 10, 64)
|
||||
if err != nil {
|
||||
return 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid end time format: %s", err.Error())
|
||||
}
|
||||
return startTime, endTime, nil
|
||||
}
|
||||
|
||||
func constructCSVHeaderFromQueryResponse(data map[string]any) []string {
|
||||
header := make([]string, 0, len(data))
|
||||
for key := range data {
|
||||
header = append(header, key)
|
||||
}
|
||||
// This is to ensure CSV output is consistent across multiple queries
|
||||
slices.SortFunc(header, func(a, b string) int {
|
||||
ai, bi := slices.Index(priorityColumns, a), slices.Index(priorityColumns, b)
|
||||
switch {
|
||||
case ai != -1 && bi != -1:
|
||||
return ai - bi
|
||||
case ai != -1:
|
||||
return -1
|
||||
case bi != -1:
|
||||
return 1
|
||||
default:
|
||||
if a < b {
|
||||
return -1
|
||||
} else if a > b {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
})
|
||||
return header
|
||||
}
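
// Illustrative sketch: the header produced for a raw row is deterministic even
// though Go map iteration order is not. Priority columns ("timestamp", "id")
// come first, and the remaining keys are sorted alphabetically. The field
// names below are examples only.
func exampleCSVHeaderOrdering() {
	row := map[string]any{
		"message":   "hello",
		"level":     "info",
		"id":        "log-id-1",
		"timestamp": uint64(1693612800000000000),
	}
	header := constructCSVHeaderFromQueryResponse(row)
	fmt.Println(header) // [timestamp id level message]
}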
|
||||
|
||||
@@ -375,12 +427,9 @@ func constructCSVRecordFromQueryResponse(data map[string]any, headerToIndexMappi
|
||||
valueStr = v.String()
|
||||
|
||||
default:
|
||||
jsonBytes, err := json.Marshal(v)
|
||||
if err != nil {
|
||||
valueStr = fmt.Sprintf("%v", v)
|
||||
} else {
|
||||
valueStr = string(jsonBytes)
|
||||
}
|
||||
// For all other complex types (maps, structs, etc.)
|
||||
jsonBytes, _ := json.Marshal(v)
|
||||
valueStr = string(jsonBytes)
|
||||
}
|
||||
|
||||
record[index] = sanitizeForCSV(valueStr)
|
||||
@@ -389,6 +438,26 @@ func constructCSVRecordFromQueryResponse(data map[string]any, headerToIndexMappi
|
||||
return record
|
||||
}
|
||||
|
||||
// getExportQueryColumns parses the "columns" query parameters and returns a slice of TelemetryFieldKey structs.
// Each column should be a valid telemetry field key in the format "context.field:type" or "context.field" or "field"
|
||||
func getExportQueryColumns(queryParams url.Values) []telemetrytypes.TelemetryFieldKey {
|
||||
columnParams := queryParams["columns"]
|
||||
|
||||
columns := make([]telemetrytypes.TelemetryFieldKey, 0, len(columnParams))
|
||||
|
||||
for _, columnStr := range columnParams {
|
||||
// Skip empty strings
|
||||
columnStr = strings.TrimSpace(columnStr)
|
||||
if columnStr == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
columns = append(columns, telemetrytypes.GetFieldKeyFromKeyText(columnStr))
|
||||
}
|
||||
|
||||
return columns
|
||||
}
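
// Illustrative sketch: how "columns" values map onto TelemetryFieldKey,
// mirroring the unit tests for getExportQueryColumns. Empty or whitespace-only
// entries are skipped; "attribute.user:string" carries both a field context
// and a data type. The values are examples only.
func exampleExportQueryColumns() {
	columns := getExportQueryColumns(url.Values{
		"columns": {"timestamp", "attribute.user:string", " "},
	})
	// columns[0] -> {Name: "timestamp"}
	// columns[1] -> {Name: "user", FieldContext: attribute, FieldDataType: string}
	fmt.Println(len(columns)) // 2
}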
|
||||
|
||||
func getsizeOfStringSlice(slice []string) uint64 {
|
||||
var totalBytes uint64
|
||||
for _, str := range slice {
|
||||
@@ -396,3 +465,52 @@ func getsizeOfStringSlice(slice []string) uint64 {
|
||||
}
|
||||
return totalBytes
|
||||
}
|
||||
|
||||
// getExportQueryOrderBy parses the "order_by" query parameters and returns a slice of OrderBy structs.
// Each "order_by" parameter should be in the format "column:direction"
// Each "column" should be a valid telemetry field key in the format "context.field:type" or "context.field" or "field"
|
||||
func getExportQueryOrderBy(queryParams url.Values) ([]qbtypes.OrderBy, error) {
|
||||
orderByParam := queryParams.Get("order_by")
|
||||
|
||||
orderByParam = strings.TrimSpace(orderByParam)
|
||||
if orderByParam == "" {
|
||||
return telemetrylogs.DefaultLogsV2SortingOrder, nil
|
||||
}
|
||||
|
||||
parts := strings.Split(orderByParam, ":")
|
||||
if len(parts) != 2 && len(parts) != 3 {
|
||||
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order_by format: %s, should be <column>:<direction>", orderByParam)
|
||||
}
|
||||
|
||||
column := strings.Join(parts[:len(parts)-1], ":")
|
||||
direction := parts[len(parts)-1]
|
||||
|
||||
orderDirection, ok := qbtypes.OrderDirectionMap[direction]
|
||||
if !ok {
|
||||
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order_by direction: %s, should be one of %s, %s", direction, qbtypes.OrderDirectionAsc, qbtypes.OrderDirectionDesc)
|
||||
}
|
||||
|
||||
orderByKey := telemetrytypes.GetFieldKeyFromKeyText(column)
|
||||
|
||||
orderBy := []qbtypes.OrderBy{
|
||||
{
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: orderByKey,
|
||||
},
|
||||
Direction: orderDirection,
|
||||
},
|
||||
}
|
||||
|
||||
// If we are ordering by the timestamp column, also order by the ID column
|
||||
if orderByKey.Name == telemetrylogs.LogsV2TimestampColumn {
|
||||
orderBy = append(orderBy, qbtypes.OrderBy{
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2IDColumn,
|
||||
},
|
||||
},
|
||||
Direction: orderDirection,
|
||||
})
|
||||
}
|
||||
return orderBy, nil
|
||||
}
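
// Illustrative sketch: ordering by the timestamp column implicitly adds the id
// column as a tiebreaker with the same direction, so "timestamp:asc" expands
// to two OrderBy entries. This mirrors the unit tests for getExportQueryOrderBy.
func exampleExportQueryOrderBy() {
	order, err := getExportQueryOrderBy(url.Values{"order_by": {"timestamp:asc"}})
	if err == nil {
		fmt.Println(len(order)) // 2: timestamp asc, then id asc
	}
}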
|
||||
|
||||
@@ -2,85 +2,58 @@ package implrawdataexport
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/http/binding"
|
||||
"github.com/SigNoz/signoz/pkg/types/exporttypes"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestExportRawDataQueryParams_BindingDefaults(t *testing.T) {
|
||||
var params exporttypes.ExportRawDataQueryParams
|
||||
err := binding.Query.BindQuery(url.Values{}, ¶ms)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "csv", params.Format)
|
||||
assert.Equal(t, DefaultExportRowCountLimit, params.Limit)
|
||||
}
|
||||
|
||||
func logQuery(limit int) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{Limit: limit},
|
||||
}
|
||||
}
|
||||
|
||||
func traceQuery(limit int) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{Limit: limit},
|
||||
}
|
||||
}
|
||||
|
||||
func traceOperatorQuery(limit int) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeTraceOperator,
|
||||
Spec: qbtypes.QueryBuilderTraceOperator{Limit: limit, Expression: "A"},
|
||||
}
|
||||
}
|
||||
|
||||
func makeRequest(queries ...qbtypes.QueryEnvelope) qbtypes.QueryRangeRequest {
|
||||
return qbtypes.QueryRangeRequest{
|
||||
Start: 1000000000000,
|
||||
End: 1000003600000,
|
||||
RequestType: qbtypes.RequestTypeRaw,
|
||||
CompositeQuery: qbtypes.CompositeQuery{Queries: queries},
|
||||
}
|
||||
}
|
||||
|
||||
func TestValidateSpecForExport(t *testing.T) {
|
||||
func TestGetExportQuerySource(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
req qbtypes.QueryRangeRequest
|
||||
expectedError bool
|
||||
name string
|
||||
queryParams url.Values
|
||||
expectedSource string
|
||||
expectedError bool
|
||||
}{
|
||||
{
|
||||
name: "single log query",
|
||||
req: makeRequest(logQuery(0)),
|
||||
name: "default logs source",
|
||||
queryParams: url.Values{},
|
||||
expectedSource: "logs",
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "single trace query",
|
||||
req: makeRequest(traceQuery(0)),
|
||||
name: "explicit logs source",
|
||||
queryParams: url.Values{"source": {"logs"}},
|
||||
expectedSource: "logs",
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "trace operator alone",
|
||||
req: makeRequest(traceOperatorQuery(0)),
|
||||
name: "metrics source - not supported",
|
||||
queryParams: url.Values{"source": {"metrics"}},
|
||||
expectedSource: "metrics",
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "multiple queries without trace operator",
|
||||
req: makeRequest(logQuery(0), traceQuery(0)),
|
||||
expectedError: true,
|
||||
name: "traces source - not supported",
|
||||
queryParams: url.Values{"source": {"traces"}},
|
||||
expectedSource: "traces",
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "unsupported query type",
|
||||
req: makeRequest(qbtypes.QueryEnvelope{Type: qbtypes.QueryTypeBuilder, Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{}}),
|
||||
expectedError: true,
|
||||
name: "invalid source",
|
||||
queryParams: url.Values{"source": {"invalid"}},
|
||||
expectedSource: "",
|
||||
expectedError: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
err := validateSpecForExport(&tt.req)
|
||||
source, err := getExportQuerySource(tt.queryParams)
|
||||
assert.Equal(t, tt.expectedSource, source)
|
||||
if tt.expectedError {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
@@ -90,69 +63,456 @@ func TestValidateSpecForExport(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestValidateAndApplyDefaultExportLimits(t *testing.T) {
|
||||
func TestGetExportQueryFormat(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
queries []qbtypes.QueryEnvelope
|
||||
expectedError bool
|
||||
checkQueries func(t *testing.T, queries []qbtypes.QueryEnvelope)
|
||||
name string
|
||||
queryParams url.Values
|
||||
expectedFormat string
|
||||
expectedError bool
|
||||
}{
|
||||
{
|
||||
name: "single log query, zero limit gets default",
|
||||
queries: makeRequest(logQuery(0)).CompositeQuery.Queries,
|
||||
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
|
||||
assert.Equal(t, DefaultExportRowCountLimit, q[0].GetLimit())
|
||||
},
|
||||
name: "default csv format",
|
||||
queryParams: url.Values{},
|
||||
expectedFormat: "csv",
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "single log query, valid limit kept",
|
||||
queries: makeRequest(logQuery(1000)).CompositeQuery.Queries,
|
||||
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
|
||||
assert.Equal(t, 1000, q[0].GetLimit())
|
||||
},
|
||||
name: "explicit csv format",
|
||||
queryParams: url.Values{"format": {"csv"}},
|
||||
expectedFormat: "csv",
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "single log query, max limit kept",
|
||||
queries: makeRequest(logQuery(MaxExportRowCountLimit)).CompositeQuery.Queries,
|
||||
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
|
||||
assert.Equal(t, MaxExportRowCountLimit, q[0].GetLimit())
|
||||
},
|
||||
name: "jsonl format",
|
||||
queryParams: url.Values{"format": {"jsonl"}},
|
||||
expectedFormat: "jsonl",
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "single log query, limit exceeds max",
|
||||
queries: makeRequest(logQuery(MaxExportRowCountLimit + 1)).CompositeQuery.Queries,
|
||||
name: "invalid format",
|
||||
queryParams: url.Values{"format": {"xml"}},
|
||||
expectedFormat: "",
|
||||
expectedError: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
format, err := getExportQueryFormat(tt.queryParams)
|
||||
assert.Equal(t, tt.expectedFormat, format)
|
||||
if tt.expectedError {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryLimit(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
queryParams url.Values
|
||||
expectedLimit int
|
||||
expectedError bool
|
||||
}{
|
||||
{
|
||||
name: "default limit",
|
||||
queryParams: url.Values{},
|
||||
expectedLimit: DefaultExportRowCountLimit,
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "valid limit",
|
||||
queryParams: url.Values{"limit": {"5000"}},
|
||||
expectedLimit: 5000,
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "maximum limit",
|
||||
queryParams: url.Values{"limit": {strconv.Itoa(MaxExportRowCountLimit)}},
|
||||
expectedLimit: MaxExportRowCountLimit,
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "limit exceeds maximum",
|
||||
queryParams: url.Values{"limit": {"100000"}},
|
||||
expectedLimit: 0,
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "single log query, negative limit",
|
||||
queries: makeRequest(logQuery(-1)).CompositeQuery.Queries,
|
||||
name: "invalid limit format",
|
||||
queryParams: url.Values{"limit": {"invalid"}},
|
||||
expectedLimit: 0,
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "single trace query, zero limit gets default",
|
||||
queries: makeRequest(traceQuery(0)).CompositeQuery.Queries,
|
||||
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
|
||||
assert.Equal(t, DefaultExportRowCountLimit, q[0].GetLimit())
|
||||
name: "negative limit",
|
||||
queryParams: url.Values{"limit": {"-100"}},
|
||||
expectedLimit: 0,
|
||||
expectedError: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
limit, err := getExportQueryLimit(tt.queryParams)
|
||||
assert.Equal(t, tt.expectedLimit, limit)
|
||||
if tt.expectedError {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryTimeRange(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
queryParams url.Values
|
||||
expectedStartTime uint64
|
||||
expectedEndTime uint64
|
||||
expectedError bool
|
||||
}{
|
||||
{
|
||||
name: "valid time range",
|
||||
queryParams: url.Values{
|
||||
"start": {"1640995200"},
|
||||
"end": {"1641081600"},
|
||||
},
|
||||
expectedStartTime: 1640995200,
|
||||
expectedEndTime: 1641081600,
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "missing start time",
|
||||
queryParams: url.Values{"end": {"1641081600"}},
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "missing end time",
|
||||
queryParams: url.Values{"start": {"1640995200"}},
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "missing both times",
|
||||
queryParams: url.Values{},
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "invalid start time format",
|
||||
queryParams: url.Values{
|
||||
"start": {"invalid"},
|
||||
"end": {"1641081600"},
|
||||
},
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "invalid end time format",
|
||||
queryParams: url.Values{
|
||||
"start": {"1640995200"},
|
||||
"end": {"invalid"},
|
||||
},
|
||||
expectedError: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
startTime, endTime, err := getExportQueryTimeRange(tt.queryParams)
|
||||
if tt.expectedError {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, tt.expectedStartTime, startTime)
|
||||
assert.Equal(t, tt.expectedEndTime, endTime)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryColumns(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
queryParams url.Values
|
||||
expectedColumns []telemetrytypes.TelemetryFieldKey
|
||||
}{
|
||||
{
|
||||
name: "no columns specified",
|
||||
queryParams: url.Values{},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{},
|
||||
},
|
||||
{
|
||||
name: "single column",
|
||||
queryParams: url.Values{
|
||||
"columns": {"timestamp"},
|
||||
},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "trace operator alone, zero limit gets default",
|
||||
queries: makeRequest(traceOperatorQuery(0)).CompositeQuery.Queries,
|
||||
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
|
||||
assert.Equal(t, DefaultExportRowCountLimit, q[0].GetLimit())
|
||||
name: "multiple columns",
|
||||
queryParams: url.Values{
|
||||
"columns": {"timestamp", "message", "level"},
|
||||
},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
{Name: "message"},
|
||||
{Name: "level"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "empty column name (should be skipped)",
|
||||
queryParams: url.Values{
|
||||
"columns": {"timestamp", "", "level"},
|
||||
},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
{Name: "level"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "whitespace column name (should be skipped)",
|
||||
queryParams: url.Values{
|
||||
"columns": {"timestamp", " ", "level"},
|
||||
},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
{Name: "level"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "valid column name with data type",
|
||||
queryParams: url.Values{
|
||||
"columns": {"timestamp", "attribute.user:string", "level"},
|
||||
},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
{Name: "user", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString},
|
||||
{Name: "level"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "valid column name with dot notation",
|
||||
queryParams: url.Values{
|
||||
"columns": {"timestamp", "attribute.user.string", "level"},
|
||||
},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
{Name: "user.string", FieldContext: telemetrytypes.FieldContextAttribute},
|
||||
{Name: "level"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
err := validateAndApplyDefaultExportLimits(tt.queries)
|
||||
columns := getExportQueryColumns(tt.queryParams)
|
||||
assert.Equal(t, len(tt.expectedColumns), len(columns))
|
||||
for i, expectedCol := range tt.expectedColumns {
|
||||
assert.Equal(t, expectedCol, columns[i])
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryOrderBy(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
queryParams url.Values
|
||||
expectedOrder []qbtypes.OrderBy
|
||||
expectedError bool
|
||||
}{
|
||||
{
|
||||
name: "no order specified",
|
||||
queryParams: url.Values{},
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2TimestampColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2IDColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "single order error, direction not specified",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"timestamp"},
|
||||
},
|
||||
expectedOrder: nil,
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "single order no error",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"timestamp:asc"},
|
||||
},
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2TimestampColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2IDColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "multiple orders",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"timestamp:asc", "body:desc", "id:asc"},
|
||||
},
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2TimestampColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2IDColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "empty order name (should be skipped)",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"timestamp:asc", "", "id:asc"},
|
||||
},
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2TimestampColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2IDColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "whitespace order name (should be skipped)",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"timestamp:asc", " ", "id:asc"},
|
||||
},
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2TimestampColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2IDColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "invalid order name (should error out)",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"attributes.user:", "id:asc"},
|
||||
},
|
||||
expectedOrder: nil,
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "valid order name (should be included)",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"attribute.user:string:desc", "id:asc"},
|
||||
},
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "user",
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "valid order name (should be included)",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"attribute.user.string:desc", "id:asc"},
|
||||
},
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "user.string",
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
order, err := getExportQueryOrderBy(tt.queryParams)
|
||||
if tt.expectedError {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
if tt.checkQueries != nil {
|
||||
tt.checkQueries(t, tt.queries)
|
||||
assert.Equal(t, len(tt.expectedOrder), len(order))
|
||||
for i, expectedOrd := range tt.expectedOrder {
|
||||
assert.Equal(t, expectedOrd, order[i])
|
||||
}
|
||||
}
|
||||
})
|
||||
@@ -169,8 +529,13 @@ func TestConstructCSVHeaderFromQueryResponse(t *testing.T) {
|
||||
|
||||
header := constructCSVHeaderFromQueryResponse(data)
|
||||
|
||||
// Priority columns come first in order, then the rest alphabetically.
|
||||
assert.Equal(t, []string{"timestamp", "id", "level", "message"}, header)
|
||||
// Since map iteration order is not guaranteed, check that all expected keys are present
|
||||
expectedKeys := []string{"timestamp", "message", "level", "id"}
|
||||
assert.Equal(t, len(expectedKeys), len(header))
|
||||
|
||||
for _, key := range expectedKeys {
|
||||
assert.Contains(t, header, key)
|
||||
}
|
||||
}
|
||||
|
||||
func TestConstructCSVRecordFromQueryResponse(t *testing.T) {
|
||||
|
||||
@@ -23,18 +23,8 @@ func NewModule(querier querier.Querier) rawdataexport.Module {
|
||||
|
||||
func (m *Module) ExportRawData(ctx context.Context, orgID valuer.UUID, rangeRequest *qbtypes.QueryRangeRequest, doneChan chan any) (chan *qbtypes.RawRow, chan error) {
|
||||
|
||||
traceOperatorQueryIndex := rangeRequest.TraceOperatorQueryIndex()
|
||||
|
||||
queries := rangeRequest.CompositeQuery.Queries
|
||||
|
||||
// If the trace operator query is present, mark the queries other than trace operator as disabled
|
||||
if traceOperatorQueryIndex > -1 {
|
||||
for idx := range len(queries) {
|
||||
if idx != traceOperatorQueryIndex {
|
||||
queries[idx].SetDisabled(true)
|
||||
}
|
||||
}
|
||||
}
|
||||
spec := rangeRequest.CompositeQuery.Queries[0].Spec.(qbtypes.QueryBuilderQuery[qbtypes.LogAggregation])
|
||||
rowCountLimit := spec.Limit
|
||||
|
||||
rowChan := make(chan *qbtypes.RawRow, 1)
|
||||
errChan := make(chan error, 1)
|
||||
@@ -48,62 +38,52 @@ func (m *Module) ExportRawData(ctx context.Context, orgID valuer.UUID, rangeRequ
|
||||
defer close(errChan)
|
||||
defer close(rowChan)
|
||||
|
||||
if traceOperatorQueryIndex > -1 {
|
||||
// If the trace operator query is present, we need to export the data for the trace operator query only
|
||||
exportRawDataForSingleQuery(m.querier, contextWithTimeout, orgID, rangeRequest, rowChan, errChan, doneChan, traceOperatorQueryIndex)
|
||||
} else {
|
||||
// If the trace operator query is not present, we need to export the data for the first query only
|
||||
exportRawDataForSingleQuery(m.querier, contextWithTimeout, orgID, rangeRequest, rowChan, errChan, doneChan, 0)
|
||||
rowCount := 0
|
||||
|
||||
for rowCount < rowCountLimit {
|
||||
spec.Limit = min(ChunkSize, rowCountLimit-rowCount)
|
||||
spec.Offset = rowCount
|
||||
|
||||
rangeRequest.CompositeQuery.Queries[0].Spec = spec
|
||||
|
||||
response, err := m.querier.QueryRange(contextWithTimeout, orgID, rangeRequest)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
return
|
||||
}
|
||||
|
||||
newRowsCount := 0
|
||||
for _, result := range response.Data.Results {
|
||||
resultData, ok := result.(*qbtypes.RawData)
|
||||
if !ok {
|
||||
errChan <- errors.NewInternalf(errors.CodeInternal, "expected RawData, got %T", result)
|
||||
return
|
||||
}
|
||||
|
||||
newRowsCount += len(resultData.Rows)
|
||||
for _, row := range resultData.Rows {
|
||||
select {
|
||||
case rowChan <- row:
|
||||
case <-doneChan:
|
||||
return
|
||||
case <-ctx.Done():
|
||||
errChan <- ctx.Err()
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// Break if we did not receive any new rows
|
||||
if newRowsCount == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
rowCount += newRowsCount
|
||||
|
||||
}
|
||||
}()
|
||||
|
||||
return rowChan, errChan
|
||||
|
||||
}
|
||||
|
||||
func exportRawDataForSingleQuery(querier querier.Querier, ctx context.Context, orgID valuer.UUID, rangeRequest *qbtypes.QueryRangeRequest, rowChan chan *qbtypes.RawRow, errChan chan error, doneChan chan any, queryIndex int) {
|
||||
|
||||
queries := rangeRequest.CompositeQuery.Queries
|
||||
rowCountLimit := queries[queryIndex].GetLimit()
|
||||
rowCount := 0
|
||||
|
||||
for rowCount < rowCountLimit {
|
||||
chunkSize := min(ChunkSize, rowCountLimit-rowCount)
|
||||
queries[queryIndex].SetLimit(chunkSize)
|
||||
queries[queryIndex].SetOffset(rowCount)
|
||||
|
||||
response, err := querier.QueryRange(ctx, orgID, rangeRequest)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
return
|
||||
}
|
||||
|
||||
newRowsCount := 0
|
||||
for _, result := range response.Data.Results {
|
||||
resultData, ok := result.(*qbtypes.RawData)
|
||||
if !ok {
|
||||
errChan <- errors.NewInternalf(errors.CodeInternal, "expected RawData, got %T", result)
|
||||
return
|
||||
}
|
||||
|
||||
newRowsCount += len(resultData.Rows)
|
||||
for _, row := range resultData.Rows {
|
||||
select {
|
||||
case rowChan <- row:
|
||||
case <-doneChan:
|
||||
return
|
||||
case <-ctx.Done():
|
||||
errChan <- ctx.Err()
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
rowCount += newRowsCount
|
||||
|
||||
// Stop if we received fewer rows than requested — no more data available
|
||||
if newRowsCount < chunkSize {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
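
// Illustrative sketch (not part of the module): the chunking arithmetic used
// above. With a hypothetical ChunkSize of 1000 and a row count limit of 2500,
// the exporter issues queries with (limit, offset) pairs (1000, 0),
// (1000, 1000), (500, 2000), stopping earlier if a chunk comes back short.
func exampleChunkPlan(rowCountLimit, chunkSize int) [][2]int {
	plan := make([][2]int, 0)
	for rowCount := 0; rowCount < rowCountLimit; {
		limit := min(chunkSize, rowCountLimit-rowCount)
		plan = append(plan, [2]int{limit, rowCount})
		rowCount += limit // assumes every chunk comes back full
	}
	return plan
}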
|
||||
|
||||
@@ -12,6 +12,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/savedviewtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
@@ -24,7 +25,7 @@ func NewModule(sqlstore sqlstore.SQLStore) savedview.Module {
|
||||
}
|
||||
|
||||
func (module *module) GetViewsForFilters(ctx context.Context, orgID string, sourcePage string, name string, category string) ([]*v3.SavedView, error) {
|
||||
var views []types.SavedView
|
||||
var views []savedviewtypes.SavedView
|
||||
var err error
|
||||
if len(category) == 0 {
|
||||
err = module.sqlstore.BunDB().NewSelect().Model(&views).Where("org_id = ? AND source_page = ? AND name LIKE ?", orgID, sourcePage, "%"+name+"%").Scan(ctx)
|
||||
@@ -76,7 +77,7 @@ func (module *module) CreateView(ctx context.Context, orgID string, view v3.Save
|
||||
createBy := claims.Email
|
||||
updatedBy := claims.Email
|
||||
|
||||
dbView := types.SavedView{
|
||||
dbView := savedviewtypes.SavedView{
|
||||
TimeAuditable: types.TimeAuditable{
|
||||
CreatedAt: createdAt,
|
||||
UpdatedAt: updatedAt,
|
||||
@@ -105,7 +106,7 @@ func (module *module) CreateView(ctx context.Context, orgID string, view v3.Save
|
||||
}
|
||||
|
||||
func (module *module) GetView(ctx context.Context, orgID string, uuid valuer.UUID) (*v3.SavedView, error) {
|
||||
var view types.SavedView
|
||||
var view savedviewtypes.SavedView
|
||||
err := module.sqlstore.BunDB().NewSelect().Model(&view).Where("org_id = ? AND id = ?", orgID, uuid.StringValue()).Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "error in getting saved view")
|
||||
@@ -146,7 +147,7 @@ func (module *module) UpdateView(ctx context.Context, orgID string, uuid valuer.
|
||||
updatedBy := claims.Email
|
||||
|
||||
_, err = module.sqlstore.BunDB().NewUpdate().
|
||||
Model(&types.SavedView{}).
|
||||
Model(&savedviewtypes.SavedView{}).
|
||||
Set("updated_at = ?, updated_by = ?, name = ?, category = ?, source_page = ?, tags = ?, data = ?, extra_data = ?",
|
||||
updatedAt, updatedBy, view.Name, view.Category, view.SourcePage, strings.Join(view.Tags, ","), data, view.ExtraData).
|
||||
Where("id = ?", uuid.StringValue()).
|
||||
@@ -160,7 +161,7 @@ func (module *module) UpdateView(ctx context.Context, orgID string, uuid valuer.
|
||||
|
||||
func (module *module) DeleteView(ctx context.Context, orgID string, uuid valuer.UUID) error {
|
||||
_, err := module.sqlstore.BunDB().NewDelete().
|
||||
Model(&types.SavedView{}).
|
||||
Model(&savedviewtypes.SavedView{}).
|
||||
Where("id = ?", uuid.StringValue()).
|
||||
Where("org_id = ?", orgID).
|
||||
Exec(ctx)
|
||||
@@ -171,7 +172,7 @@ func (module *module) DeleteView(ctx context.Context, orgID string, uuid valuer.
|
||||
}
|
||||
|
||||
func (module *module) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) {
|
||||
savedViews := []*types.SavedView{}
|
||||
savedViews := []*savedviewtypes.SavedView{}
|
||||
|
||||
err := module.
|
||||
sqlstore.
|
||||
@@ -184,5 +185,5 @@ func (module *module) Collect(ctx context.Context, orgID valuer.UUID) (map[strin
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return types.NewStatsFromSavedViews(savedViews), nil
|
||||
return savedviewtypes.NewStatsFromSavedViews(savedViews), nil
|
||||
}
|
||||
|
||||
335
pkg/modules/serviceaccount/implserviceaccount/handler.go
Normal file
@@ -0,0 +1,335 @@
|
||||
package implserviceaccount
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/http/binding"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/serviceaccounttypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
type handler struct {
|
||||
module serviceaccount.Module
|
||||
}
|
||||
|
||||
func NewHandler(module serviceaccount.Module) serviceaccount.Handler {
|
||||
return &handler{module: module}
|
||||
}
|
||||
|
||||
func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
req := new(serviceaccounttypes.PostableServiceAccount)
|
||||
if err := binding.JSON.BindBody(r.Body, req); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
serviceAccount := serviceaccounttypes.NewServiceAccount(req.Name, req.Email, req.Roles, serviceaccounttypes.StatusActive, valuer.MustNewUUID(claims.OrgID))
|
||||
err = handler.module.Create(ctx, valuer.MustNewUUID(claims.OrgID), serviceAccount)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusCreated, types.Identifiable{ID: serviceAccount.ID})
|
||||
}
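
// Illustrative sketch of a client request for the Create handler above. The
// JSON field names are assumed to mirror PostableServiceAccount's Name, Email
// and Roles fields (lower-cased here as an assumption), the URL path is a
// placeholder, and the "strings" import is needed in addition to those shown;
// none of this is confirmed by this change.
func exampleCreateServiceAccountRequest() (*http.Request, error) {
	body := strings.NewReader(`{"name":"ci-bot","email":"ci-bot@example.com","roles":["viewer"]}`)
	req, err := http.NewRequest(http.MethodPost, "/api/v1/service_accounts", body) // hypothetical path
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/json")
	return req, nil
}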
|
||||
|
||||
func (handler *handler) Get(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
id, err := valuer.NewUUID(mux.Vars(r)["id"])
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
serviceAccount, err := handler.module.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, serviceAccount)
|
||||
}
|
||||
|
||||
func (handler *handler) List(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
serviceAccounts, err := handler.module.List(ctx, valuer.MustNewUUID(claims.OrgID))
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, serviceAccounts)
|
||||
}
|
||||
|
||||
func (handler *handler) Update(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
id, err := valuer.NewUUID(mux.Vars(r)["id"])
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
req := new(serviceaccounttypes.UpdatableServiceAccount)
|
||||
if err := binding.JSON.BindBody(r.Body, req); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
serviceAccount, err := handler.module.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
serviceAccount.Update(req.Name, req.Email, req.Roles)
|
||||
err = handler.module.Update(ctx, valuer.MustNewUUID(claims.OrgID), serviceAccount)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func (handler *handler) UpdateStatus(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
id, err := valuer.NewUUID(mux.Vars(r)["id"])
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
req := new(serviceaccounttypes.UpdatableServiceAccountStatus)
|
||||
if err := binding.JSON.BindBody(r.Body, req); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
serviceAccount, err := handler.module.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
serviceAccount.UpdateStatus(req.Status)
|
||||
err = handler.module.UpdateStatus(ctx, valuer.MustNewUUID(claims.OrgID), serviceAccount)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func (handler *handler) Delete(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
id, err := valuer.NewUUID(mux.Vars(r)["id"])
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
err = handler.module.Delete(ctx, valuer.MustNewUUID(claims.OrgID), id)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func (handler *handler) CreateFactorAPIKey(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
id, err := valuer.NewUUID(mux.Vars(r)["id"])
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
req := new(serviceaccounttypes.PostableFactorAPIKey)
|
||||
if err := binding.JSON.BindBody(r.Body, req); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
// this takes care of checking the existence of service account and the org constraint.
|
||||
serviceAccount, err := handler.module.GetWithoutRoles(ctx, valuer.MustNewUUID(claims.OrgID), id)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
factorAPIKey, err := serviceAccount.NewFactorAPIKey(req.Name, req.ExpiresAt)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
err = handler.module.CreateFactorAPIKey(ctx, factorAPIKey)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusCreated, serviceaccounttypes.NewGettableFactorAPIKeyWithKey(factorAPIKey.ID, factorAPIKey.Key))
|
||||
}
|
||||
|
||||
func (handler *handler) ListFactorAPIKey(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
id, err := valuer.NewUUID(mux.Vars(r)["id"])
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
serviceAccount, err := handler.module.GetWithoutRoles(ctx, valuer.MustNewUUID(claims.OrgID), id)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
factorAPIKeys, err := handler.module.ListFactorAPIKey(ctx, serviceAccount.ID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, serviceaccounttypes.NewGettableFactorAPIKeys(factorAPIKeys))
|
||||
}
|
||||
|
||||
func (handler *handler) UpdateFactorAPIKey(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
id, err := valuer.NewUUID(mux.Vars(r)["id"])
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
factorAPIKeyID, err := valuer.NewUUID(mux.Vars(r)["fid"])
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
req := new(serviceaccounttypes.UpdatableFactorAPIKey)
|
||||
if err := binding.JSON.BindBody(r.Body, req); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
serviceAccount, err := handler.module.GetWithoutRoles(ctx, valuer.MustNewUUID(claims.OrgID), id)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
factorAPIKey, err := handler.module.GetFactorAPIKey(ctx, serviceAccount.ID, factorAPIKeyID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
factorAPIKey.Update(req.Name, req.ExpiresAt)
|
||||
err = handler.module.UpdateFactorAPIKey(ctx, serviceAccount.ID, factorAPIKey)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func (handler *handler) RevokeFactorAPIKey(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
id, err := valuer.NewUUID(mux.Vars(r)["id"])
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
factorAPIKeyID, err := valuer.NewUUID(mux.Vars(r)["fid"])
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
serviceAccount, err := handler.module.GetWithoutRoles(ctx, valuer.MustNewUUID(claims.OrgID), id)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
err = handler.module.RevokeFactorAPIKey(ctx, serviceAccount.ID, factorAPIKeyID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
351
pkg/modules/serviceaccount/implserviceaccount/module.go
Normal file
@@ -0,0 +1,351 @@
|
||||
package implserviceaccount
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/emailing"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/emailtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/serviceaccounttypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type module struct {
|
||||
store serviceaccounttypes.Store
|
||||
authz authz.AuthZ
|
||||
emailing emailing.Emailing
|
||||
settings factory.ScopedProviderSettings
|
||||
}
|
||||
|
||||
func NewModule(store serviceaccounttypes.Store, authz authz.AuthZ, emailing emailing.Emailing, providerSettings factory.ProviderSettings) serviceaccount.Module {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/modules/serviceaccount/implserviceaccount")
|
||||
return &module{store: store, authz: authz, emailing: emailing, settings: settings}
|
||||
}
|
||||
|
||||
func (module *module) Create(ctx context.Context, orgID valuer.UUID, serviceAccount *serviceaccounttypes.ServiceAccount) error {
|
||||
// validates the presence of all roles passed in the create request
|
||||
roles, err := module.authz.ListByOrgIDAndNames(ctx, orgID, serviceAccount.Roles)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// authz actions cannot run in sql transactions
|
||||
err = module.authz.Grant(ctx, orgID, serviceAccount.Roles, authtypes.MustNewSubject(authtypes.TypeableUser, serviceAccount.ID.String(), orgID, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
storableServiceAccount := serviceaccounttypes.NewStorableServiceAccount(serviceAccount)
|
||||
storableServiceAccountRoles := serviceaccounttypes.NewStorableServiceAccountRoles(serviceAccount.ID, roles)
|
||||
err = module.store.RunInTx(ctx, func(ctx context.Context) error {
|
||||
err := module.store.Create(ctx, storableServiceAccount)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.store.CreateServiceAccountRoles(ctx, storableServiceAccountRoles)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (module *module) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*serviceaccounttypes.ServiceAccount, error) {
|
||||
storableServiceAccount, err := module.store.Get(ctx, orgID, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// orchestrate at the application layer instead of in the DB, since the ORM does this anyway for many-to-many tables.
|
||||
storableServiceAccountRoles, err := module.store.GetServiceAccountRoles(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
roleIDs := make([]valuer.UUID, len(storableServiceAccountRoles))
|
||||
for idx, sar := range storableServiceAccountRoles {
|
||||
roleIDs[idx] = valuer.MustNewUUID(sar.RoleID)
|
||||
}
|
||||
|
||||
roles, err := module.authz.ListByOrgIDAndIDs(ctx, orgID, roleIDs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
rolesNames, err := serviceaccounttypes.NewRolesFromStorableServiceAccountRoles(storableServiceAccountRoles, roles)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
serviceAccount := serviceaccounttypes.NewServiceAccountFromStorables(storableServiceAccount, rolesNames)
|
||||
return serviceAccount, nil
|
||||
}
|
||||
|
||||
func (module *module) GetWithoutRoles(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*serviceaccounttypes.ServiceAccount, error) {
|
||||
storableServiceAccount, err := module.store.Get(ctx, orgID, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// pass []string{} (not nil, to avoid panics) for roles, since this function is not meant to populate them.
|
||||
serviceAccount := serviceaccounttypes.NewServiceAccountFromStorables(storableServiceAccount, []string{})
|
||||
return serviceAccount, nil
|
||||
}
|
||||
|
||||
func (module *module) List(ctx context.Context, orgID valuer.UUID) ([]*serviceaccounttypes.ServiceAccount, error) {
|
||||
storableServiceAccounts, err := module.store.List(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
storableServiceAccountRoles, err := module.store.ListServiceAccountRolesByOrgID(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// convert the service account roles to structured data
|
||||
saIDToRoleIDs, roleIDs := serviceaccounttypes.GetUniqueRolesAndServiceAccountMapping(storableServiceAccountRoles)
|
||||
roles, err := module.authz.ListByOrgIDAndIDs(ctx, orgID, roleIDs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// fill in the role fetched data back to service account
|
||||
serviceAccounts := serviceaccounttypes.NewServiceAccountsFromRoles(storableServiceAccounts, roles, saIDToRoleIDs)
|
||||
return serviceAccounts, nil
|
||||
}
|
||||
|
||||
func (module *module) Update(ctx context.Context, orgID valuer.UUID, input *serviceaccounttypes.ServiceAccount) error {
|
||||
serviceAccount, err := module.Get(ctx, orgID, input.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
roles, err := module.authz.ListByOrgIDAndNames(ctx, orgID, input.Roles)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// compute the role diff, if any, to modify grants accordingly.
|
||||
grants, revokes := serviceAccount.PatchRoles(input)
|
||||
err = module.authz.ModifyGrant(ctx, orgID, revokes, grants, authtypes.MustNewSubject(authtypes.TypeableUser, serviceAccount.ID.String(), orgID, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
storableServiceAccountRoles := serviceaccounttypes.NewStorableServiceAccountRoles(serviceAccount.ID, roles)
|
||||
err = module.store.RunInTx(ctx, func(ctx context.Context) error {
|
||||
err := module.store.Update(ctx, orgID, serviceaccounttypes.NewStorableServiceAccount(input))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// delete all the service account roles and create new rather than diff here.
|
||||
err = module.store.DeleteServiceAccountRoles(ctx, input.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.store.CreateServiceAccountRoles(ctx, storableServiceAccountRoles)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (module *module) UpdateStatus(ctx context.Context, orgID valuer.UUID, input *serviceaccounttypes.ServiceAccount) error {
|
||||
serviceAccount, err := module.Get(ctx, orgID, input.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if input.Status == serviceAccount.Status {
|
||||
return nil
|
||||
}
|
||||
|
||||
switch input.Status {
|
||||
case serviceaccounttypes.StatusActive:
|
||||
err := module.activateServiceAccount(ctx, orgID, input)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case serviceaccounttypes.StatusDisabled:
|
||||
err := module.disableServiceAccount(ctx, orgID, input)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (module *module) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
|
||||
serviceAccount, err := module.Get(ctx, orgID, id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// revoke from authz first as this cannot run in sql transaction
|
||||
err = module.authz.Revoke(ctx, orgID, serviceAccount.Roles, authtypes.MustNewSubject(authtypes.TypeableUser, serviceAccount.ID.String(), orgID, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.store.RunInTx(ctx, func(ctx context.Context) error {
|
||||
err := module.store.DeleteServiceAccountRoles(ctx, serviceAccount.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.store.RevokeAllFactorAPIKeys(ctx, serviceAccount.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.store.Delete(ctx, serviceAccount.OrgID, serviceAccount.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (module *module) CreateFactorAPIKey(ctx context.Context, factorAPIKey *serviceaccounttypes.FactorAPIKey) error {
|
||||
storableFactorAPIKey := serviceaccounttypes.NewStorableFactorAPIKey(factorAPIKey)
|
||||
|
||||
err := module.store.CreateFactorAPIKey(ctx, storableFactorAPIKey)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
serviceAccount, err := module.store.GetByID(ctx, factorAPIKey.ServiceAccountID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := module.emailing.SendHTML(ctx, serviceAccount.Email, "New API Key created for your SigNoz account", emailtypes.TemplateNameAPIKeyEvent, map[string]any{
|
||||
"Name": serviceAccount.Name,
|
||||
"KeyName": factorAPIKey.Name,
|
||||
"KeyID": factorAPIKey.ID.String(),
|
||||
"KeyCreatedAt": factorAPIKey.CreatedAt.String(),
|
||||
}); err != nil {
|
||||
module.settings.Logger().ErrorContext(ctx, "failed to send email", "error", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (module *module) GetFactorAPIKey(ctx context.Context, serviceAccountID valuer.UUID, id valuer.UUID) (*serviceaccounttypes.FactorAPIKey, error) {
|
||||
storableFactorAPIKey, err := module.store.GetFactorAPIKey(ctx, serviceAccountID, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return serviceaccounttypes.NewFactorAPIKeyFromStorable(storableFactorAPIKey), nil
|
||||
}
|
||||
|
||||
func (module *module) ListFactorAPIKey(ctx context.Context, serviceAccountID valuer.UUID) ([]*serviceaccounttypes.FactorAPIKey, error) {
|
||||
storables, err := module.store.ListFactorAPIKey(ctx, serviceAccountID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return serviceaccounttypes.NewFactorAPIKeyFromStorables(storables), nil
|
||||
}
|
||||
|
||||
func (module *module) UpdateFactorAPIKey(ctx context.Context, serviceAccountID valuer.UUID, factorAPIKey *serviceaccounttypes.FactorAPIKey) error {
|
||||
return module.store.UpdateFactorAPIKey(ctx, serviceAccountID, serviceaccounttypes.NewStorableFactorAPIKey(factorAPIKey))
|
||||
}
|
||||
|
||||
func (module *module) RevokeFactorAPIKey(ctx context.Context, serviceAccountID valuer.UUID, id valuer.UUID) error {
|
||||
factorAPIKey, err := module.GetFactorAPIKey(ctx, serviceAccountID, id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.store.RevokeFactorAPIKey(ctx, serviceAccountID, id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
serviceAccount, err := module.store.GetByID(ctx, serviceAccountID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := module.emailing.SendHTML(ctx, serviceAccount.Email, "API Key revoked for your SigNoz account", emailtypes.TemplateNameAPIKeyEvent, map[string]any{
|
||||
"Name": serviceAccount.Name,
|
||||
"KeyName": factorAPIKey.Name,
|
||||
"KeyID": factorAPIKey.ID.String(),
|
||||
"KeyCreatedAt": factorAPIKey.CreatedAt.String(),
|
||||
}); err != nil {
|
||||
module.settings.Logger().ErrorContext(ctx, "failed to send email", "error", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (module *module) disableServiceAccount(ctx context.Context, orgID valuer.UUID, input *serviceaccounttypes.ServiceAccount) error {
|
||||
err := module.authz.Revoke(ctx, orgID, input.Roles, authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.String(), orgID, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.store.RunInTx(ctx, func(ctx context.Context) error {
|
||||
// revoke all the API keys on disable
|
||||
err := module.store.RevokeAllFactorAPIKeys(ctx, input.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// update the status but do not delete the role mappings as we will reuse them on activation.
|
||||
err = module.Update(ctx, orgID, input)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (module *module) activateServiceAccount(ctx context.Context, orgID valuer.UUID, input *serviceaccounttypes.ServiceAccount) error {
|
||||
err := module.authz.Grant(ctx, orgID, input.Roles, authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.String(), orgID, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.Update(ctx, orgID, input)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
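Illustrative sketch only, not part of this change: a caller toggling a service account's status via the module above. The Module interface, the status constants, and the UpdateStatus behaviour are those shown in this diff; imports and the surrounding wiring are assumed from the files above.

// toggleStatus disables and then re-activates a service account.
func toggleStatus(ctx context.Context, module serviceaccount.Module, orgID valuer.UUID, id valuer.UUID) error {
    // Use Get (not GetWithoutRoles) so sa.Roles is populated: disable and
    // activate revoke and grant authz against those roles.
    sa, err := module.Get(ctx, orgID, id)
    if err != nil {
        return err
    }

    // Disabling revokes the grants and all API keys but keeps the stored role
    // mappings, so a later activation re-grants the same roles.
    sa.Status = serviceaccounttypes.StatusDisabled
    if err := module.UpdateStatus(ctx, orgID, sa); err != nil {
        return err
    }

    sa.Status = serviceaccounttypes.StatusActive
    return module.UpdateStatus(ctx, orgID, sa)
}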
282
pkg/modules/serviceaccount/implserviceaccount/store.go
Normal file
@@ -0,0 +1,282 @@
|
||||
package implserviceaccount
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types/serviceaccounttypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type store struct {
|
||||
sqlstore sqlstore.SQLStore
|
||||
}
|
||||
|
||||
func NewStore(sqlstore sqlstore.SQLStore) serviceaccounttypes.Store {
|
||||
return &store{sqlstore: sqlstore}
|
||||
}
|
||||
|
||||
func (store *store) Create(ctx context.Context, storable *serviceaccounttypes.StorableServiceAccount) error {
|
||||
_, err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewInsert().
|
||||
Model(storable).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return store.sqlstore.WrapAlreadyExistsErrf(err, serviceaccounttypes.ErrCodeServiceAccountAlreadyExists, "service account with id: %s already exists", storable.ID)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (store *store) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*serviceaccounttypes.StorableServiceAccount, error) {
|
||||
storable := new(serviceaccounttypes.StorableServiceAccount)
|
||||
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewSelect().
|
||||
Model(storable).
|
||||
Where("id = ?", id).
|
||||
Where("org_id = ?", orgID).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, store.sqlstore.WrapNotFoundErrf(err, serviceaccounttypes.ErrCodeServiceAccountNotFound, "service account with id: %s doesn't exist in org: %s", id, orgID)
|
||||
}
|
||||
|
||||
return storable, nil
|
||||
}
|
||||
|
||||
func (store *store) GetByID(ctx context.Context, id valuer.UUID) (*serviceaccounttypes.StorableServiceAccount, error) {
|
||||
storable := new(serviceaccounttypes.StorableServiceAccount)
|
||||
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewSelect().
|
||||
Model(storable).
|
||||
Where("id = ?", id).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, store.sqlstore.WrapNotFoundErrf(err, serviceaccounttypes.ErrCodeServiceAccountNotFound, "service account with id: %s doesn't exist", id)
|
||||
}
|
||||
|
||||
return storable, nil
|
||||
}
|
||||
|
||||
func (store *store) List(ctx context.Context, orgID valuer.UUID) ([]*serviceaccounttypes.StorableServiceAccount, error) {
|
||||
storables := make([]*serviceaccounttypes.StorableServiceAccount, 0)
|
||||
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewSelect().
|
||||
Model(&storables).
|
||||
Where("org_id = ?", orgID).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return storables, nil
|
||||
}
|
||||
|
||||
func (store *store) Update(ctx context.Context, orgID valuer.UUID, storable *serviceaccounttypes.StorableServiceAccount) error {
|
||||
_, err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewUpdate().
|
||||
Model(storable).
|
||||
WherePK().
|
||||
Where("org_id = ?", orgID).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (store *store) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
|
||||
_, err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewDelete().
|
||||
Model(new(serviceaccounttypes.StorableServiceAccount)).
|
||||
Where("id = ?", id).
|
||||
Where("org_id = ?", orgID).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (store *store) CreateServiceAccountRoles(ctx context.Context, storables []*serviceaccounttypes.StorableServiceAccountRole) error {
|
||||
_, err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewInsert().
|
||||
Model(&storables).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return store.sqlstore.WrapAlreadyExistsErrf(err, serviceaccounttypes.ErrCodeServiceAccountRoleAlreadyExists, "duplicate role assignments for service account")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (store *store) GetServiceAccountRoles(ctx context.Context, id valuer.UUID) ([]*serviceaccounttypes.StorableServiceAccountRole, error) {
|
||||
storables := make([]*serviceaccounttypes.StorableServiceAccountRole, 0)
|
||||
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewSelect().
|
||||
Model(&storables).
|
||||
Where("service_account_id = ?", id).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
// no need to wrap not-found here, since this is a many-to-many table
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return storables, nil
|
||||
}
|
||||
|
||||
func (store *store) ListServiceAccountRolesByOrgID(ctx context.Context, orgID valuer.UUID) ([]*serviceaccounttypes.StorableServiceAccountRole, error) {
|
||||
storables := make([]*serviceaccounttypes.StorableServiceAccountRole, 0)
|
||||
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewSelect().
|
||||
Model(&storables).
|
||||
Join("JOIN service_account").
|
||||
JoinOn("service_account.id = service_account_role.service_account_id").
|
||||
Where("service_account.org_id = ?", orgID).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return storables, nil
|
||||
}
|
||||
|
||||
func (store *store) DeleteServiceAccountRoles(ctx context.Context, id valuer.UUID) error {
|
||||
_, err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewDelete().
|
||||
Model(new(serviceaccounttypes.StorableServiceAccountRole)).
|
||||
Where("service_account_id = ?", id).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (store *store) CreateFactorAPIKey(ctx context.Context, storable *serviceaccounttypes.StorableFactorAPIKey) error {
|
||||
_, err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewInsert().
|
||||
Model(storable).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return store.sqlstore.WrapAlreadyExistsErrf(err, serviceaccounttypes.ErrCodeServiceAccountFactorAPIKeyAlreadyExists, "api key with name: %s already exists for service account: %s", storable.Name, storable.ServiceAccountID)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (store *store) GetFactorAPIKey(ctx context.Context, serviceAccountID valuer.UUID, id valuer.UUID) (*serviceaccounttypes.StorableFactorAPIKey, error) {
|
||||
storable := new(serviceaccounttypes.StorableFactorAPIKey)
|
||||
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewSelect().
|
||||
Model(storable).
|
||||
Where("id = ?", id).
|
||||
Where("service_account_id = ?", serviceAccountID).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, store.sqlstore.WrapNotFoundErrf(err, serviceaccounttypes.ErrCodeServiceAccounFactorAPIKeytNotFound, "api key with id: %s doesn't exist for service account: %s", id, serviceAccountID)
|
||||
}
|
||||
|
||||
return storable, nil
|
||||
}
|
||||
|
||||
func (store *store) ListFactorAPIKey(ctx context.Context, serviceAccountID valuer.UUID) ([]*serviceaccounttypes.StorableFactorAPIKey, error) {
|
||||
storables := make([]*serviceaccounttypes.StorableFactorAPIKey, 0)
|
||||
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewSelect().
|
||||
Model(&storables).
|
||||
Where("service_account_id = ?", serviceAccountID).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return storables, nil
|
||||
}
|
||||
|
||||
func (store *store) UpdateFactorAPIKey(ctx context.Context, serviceAccountID valuer.UUID, storable *serviceaccounttypes.StorableFactorAPIKey) error {
|
||||
_, err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewUpdate().
|
||||
Model(storable).
|
||||
Where("service_account_id = ?", serviceAccountID).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (store *store) RevokeFactorAPIKey(ctx context.Context, serviceAccountID valuer.UUID, id valuer.UUID) error {
|
||||
_, err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewDelete().
|
||||
Model(new(serviceaccounttypes.StorableFactorAPIKey)).
|
||||
Where("service_account_id = ?", serviceAccountID).
|
||||
Where("id = ?", id).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (store *store) RevokeAllFactorAPIKeys(ctx context.Context, serviceAccountID valuer.UUID) error {
|
||||
_, err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewDelete().
|
||||
Model(new(serviceaccounttypes.StorableFactorAPIKey)).
|
||||
Where("service_account_id = ?", serviceAccountID).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (store *store) RunInTx(ctx context.Context, cb func(context.Context) error) error {
|
||||
return store.sqlstore.RunInTxCtx(ctx, nil, func(ctx context.Context) error {
|
||||
return cb(ctx)
|
||||
})
|
||||
}
|
||||
69
pkg/modules/serviceaccount/serviceaccount.go
Normal file
@@ -0,0 +1,69 @@
package serviceaccount

import (
"context"
"net/http"

"github.com/SigNoz/signoz/pkg/types/serviceaccounttypes"
"github.com/SigNoz/signoz/pkg/valuer"
)

type Module interface {
// Creates a new service account for an organization.
Create(context.Context, valuer.UUID, *serviceaccounttypes.ServiceAccount) error

// Gets a service account by id.
Get(context.Context, valuer.UUID, valuer.UUID) (*serviceaccounttypes.ServiceAccount, error)

// Gets a service account by id without fetching roles.
GetWithoutRoles(context.Context, valuer.UUID, valuer.UUID) (*serviceaccounttypes.ServiceAccount, error)

// List all service accounts for an organization.
List(context.Context, valuer.UUID) ([]*serviceaccounttypes.ServiceAccount, error)

// Updates an existing service account
Update(context.Context, valuer.UUID, *serviceaccounttypes.ServiceAccount) error

// Updates an existing service account status
UpdateStatus(context.Context, valuer.UUID, *serviceaccounttypes.ServiceAccount) error

// Deletes an existing service account by id
Delete(context.Context, valuer.UUID, valuer.UUID) error

// Creates a new API key for a service account
CreateFactorAPIKey(context.Context, *serviceaccounttypes.FactorAPIKey) error

// Gets a factor API key by id
GetFactorAPIKey(context.Context, valuer.UUID, valuer.UUID) (*serviceaccounttypes.FactorAPIKey, error)

// Lists all the API keys for a service account
ListFactorAPIKey(context.Context, valuer.UUID) ([]*serviceaccounttypes.FactorAPIKey, error)

// Updates an existing API key for a service account
UpdateFactorAPIKey(context.Context, valuer.UUID, *serviceaccounttypes.FactorAPIKey) error

// Revokes an existing API key for a service account
RevokeFactorAPIKey(context.Context, valuer.UUID, valuer.UUID) error
}

type Handler interface {
Create(http.ResponseWriter, *http.Request)

Get(http.ResponseWriter, *http.Request)

List(http.ResponseWriter, *http.Request)

Update(http.ResponseWriter, *http.Request)

UpdateStatus(http.ResponseWriter, *http.Request)

Delete(http.ResponseWriter, *http.Request)

CreateFactorAPIKey(http.ResponseWriter, *http.Request)

ListFactorAPIKey(http.ResponseWriter, *http.Request)

UpdateFactorAPIKey(http.ResponseWriter, *http.Request)

RevokeFactorAPIKey(http.ResponseWriter, *http.Request)
}
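Illustrative sketch only, not part of this change: how the module above can be wired and called. The constructors and method signatures come from the files in this diff (NewStore, NewModule, List, ListFactorAPIKey, RevokeFactorAPIKey); the helper function name and its parameters are assumptions for the example.

package example

import (
    "context"

    "github.com/SigNoz/signoz/pkg/authz"
    "github.com/SigNoz/signoz/pkg/emailing"
    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/modules/serviceaccount/implserviceaccount"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/valuer"
)

// revokeAllKeys wires the module the same way signoz.NewModules does in this
// diff, then revokes every API key of every service account in the org.
func revokeAllKeys(ctx context.Context, sqlStore sqlstore.SQLStore, authzProvider authz.AuthZ, emailer emailing.Emailing, providerSettings factory.ProviderSettings, orgID valuer.UUID) error {
    module := implserviceaccount.NewModule(implserviceaccount.NewStore(sqlStore), authzProvider, emailer, providerSettings)

    accounts, err := module.List(ctx, orgID)
    if err != nil {
        return err
    }

    for _, account := range accounts {
        keys, err := module.ListFactorAPIKey(ctx, account.ID)
        if err != nil {
            return err
        }
        for _, key := range keys {
            // each revocation also triggers a best-effort notification email,
            // as implemented in RevokeFactorAPIKey above
            if err := module.RevokeFactorAPIKey(ctx, account.ID, key.ID); err != nil {
                return err
            }
        }
    }

    return nil
}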
@@ -13,6 +13,7 @@ import (
|
||||
root "github.com/SigNoz/signoz/pkg/modules/user"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/integrationtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
@@ -462,7 +463,7 @@ func (h *handler) UpdateAPIKey(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email.String())) {
|
||||
if slices.Contains(integrationtypes.AllIntegrationUserEmails, integrationtypes.IntegrationUserEmail(createdByUser.Email.String())) {
|
||||
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "API Keys for integration users cannot be revoked"))
|
||||
return
|
||||
}
|
||||
@@ -507,7 +508,7 @@ func (h *handler) RevokeAPIKey(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email.String())) {
|
||||
if slices.Contains(integrationtypes.AllIntegrationUserEmails, integrationtypes.IntegrationUserEmail(createdByUser.Email.String())) {
|
||||
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "API Keys for integration users cannot be revoked"))
|
||||
return
|
||||
}
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/emailtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/integrationtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/dustin/go-humanize"
|
||||
@@ -174,7 +175,7 @@ func (module *Module) CreateUser(ctx context.Context, input *types.User, opts ..
|
||||
createUserOpts := root.NewCreateUserOptions(opts...)
|
||||
|
||||
// since assign is idempotent, multiple calls to assign won't cause issues in case of retries.
|
||||
err := module.authz.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
|
||||
err := module.authz.Grant(ctx, input.OrgID, []string{roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role)}, authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -236,8 +237,8 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
|
||||
if user.Role != "" && user.Role != existingUser.Role {
|
||||
err = m.authz.ModifyGrant(ctx,
|
||||
orgID,
|
||||
roletypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role),
|
||||
roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role),
|
||||
[]string{roletypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role)},
|
||||
[]string{roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role)},
|
||||
authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil),
|
||||
)
|
||||
if err != nil {
|
||||
@@ -279,7 +280,7 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
|
||||
return errors.WithAdditionalf(err, "cannot delete root user")
|
||||
}
|
||||
|
||||
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(user.Email.String())) {
|
||||
if slices.Contains(integrationtypes.AllIntegrationUserEmails, integrationtypes.IntegrationUserEmail(user.Email.String())) {
|
||||
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "integration user cannot be deleted")
|
||||
}
|
||||
|
||||
@@ -294,7 +295,7 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
|
||||
}
|
||||
|
||||
// since revoke is idempotent, multiple calls to revoke won't cause issues in case of retries
|
||||
err = module.authz.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
|
||||
err = module.authz.Revoke(ctx, orgID, []string{roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role)}, authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -159,8 +159,8 @@ func (s *service) createOrPromoteRootUser(ctx context.Context, orgID valuer.UUID
|
||||
if oldRole != types.RoleAdmin {
|
||||
if err := s.authz.ModifyGrant(ctx,
|
||||
orgID,
|
||||
roletypes.MustGetSigNozManagedRoleFromExistingRole(oldRole),
|
||||
roletypes.MustGetSigNozManagedRoleFromExistingRole(types.RoleAdmin),
|
||||
[]string{roletypes.MustGetSigNozManagedRoleFromExistingRole(oldRole)},
|
||||
[]string{roletypes.MustGetSigNozManagedRoleFromExistingRole(types.RoleAdmin)},
|
||||
authtypes.MustNewSubject(authtypes.TypeableUser, existingUser.ID.StringValue(), orgID, nil),
|
||||
); err != nil {
|
||||
return err
|
||||
|
||||
@@ -10,15 +10,16 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/integrationtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type cloudProviderAccountsRepository interface {
|
||||
listConnected(ctx context.Context, orgId string, provider string) ([]types.CloudIntegration, *model.ApiError)
|
||||
listConnected(ctx context.Context, orgId string, provider string) ([]integrationtypes.CloudIntegration, *model.ApiError)
|
||||
|
||||
get(ctx context.Context, orgId string, provider string, id string) (*types.CloudIntegration, *model.ApiError)
|
||||
get(ctx context.Context, orgId string, provider string, id string) (*integrationtypes.CloudIntegration, *model.ApiError)
|
||||
|
||||
getConnectedCloudAccount(ctx context.Context, orgId string, provider string, accountID string) (*types.CloudIntegration, *model.ApiError)
|
||||
getConnectedCloudAccount(ctx context.Context, orgId string, provider string, accountID string) (*integrationtypes.CloudIntegration, *model.ApiError)
|
||||
|
||||
// Insert an account or update it by (cloudProvider, id)
|
||||
// for specified non-empty fields
|
||||
@@ -27,11 +28,11 @@ type cloudProviderAccountsRepository interface {
|
||||
orgId string,
|
||||
provider string,
|
||||
id *string,
|
||||
config *types.AccountConfig,
|
||||
config *integrationtypes.AccountConfig,
|
||||
accountId *string,
|
||||
agentReport *types.AgentReport,
|
||||
agentReport *integrationtypes.AgentReport,
|
||||
removedAt *time.Time,
|
||||
) (*types.CloudIntegration, *model.ApiError)
|
||||
) (*integrationtypes.CloudIntegration, *model.ApiError)
|
||||
}
|
||||
|
||||
func newCloudProviderAccountsRepository(store sqlstore.SQLStore) (
|
||||
@@ -48,8 +49,8 @@ type cloudProviderAccountsSQLRepository struct {
|
||||
|
||||
func (r *cloudProviderAccountsSQLRepository) listConnected(
|
||||
ctx context.Context, orgId string, cloudProvider string,
|
||||
) ([]types.CloudIntegration, *model.ApiError) {
|
||||
accounts := []types.CloudIntegration{}
|
||||
) ([]integrationtypes.CloudIntegration, *model.ApiError) {
|
||||
accounts := []integrationtypes.CloudIntegration{}
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&accounts).
|
||||
@@ -72,8 +73,8 @@ func (r *cloudProviderAccountsSQLRepository) listConnected(
|
||||
|
||||
func (r *cloudProviderAccountsSQLRepository) get(
|
||||
ctx context.Context, orgId string, provider string, id string,
|
||||
) (*types.CloudIntegration, *model.ApiError) {
|
||||
var result types.CloudIntegration
|
||||
) (*integrationtypes.CloudIntegration, *model.ApiError) {
|
||||
var result integrationtypes.CloudIntegration
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&result).
|
||||
@@ -97,8 +98,8 @@ func (r *cloudProviderAccountsSQLRepository) get(
|
||||
|
||||
func (r *cloudProviderAccountsSQLRepository) getConnectedCloudAccount(
|
||||
ctx context.Context, orgId string, provider string, accountId string,
|
||||
) (*types.CloudIntegration, *model.ApiError) {
|
||||
var result types.CloudIntegration
|
||||
) (*integrationtypes.CloudIntegration, *model.ApiError) {
|
||||
var result integrationtypes.CloudIntegration
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&result).
|
||||
@@ -127,11 +128,11 @@ func (r *cloudProviderAccountsSQLRepository) upsert(
|
||||
orgId string,
|
||||
provider string,
|
||||
id *string,
|
||||
config *types.AccountConfig,
|
||||
config *integrationtypes.AccountConfig,
|
||||
accountId *string,
|
||||
agentReport *types.AgentReport,
|
||||
agentReport *integrationtypes.AgentReport,
|
||||
removedAt *time.Time,
|
||||
) (*types.CloudIntegration, *model.ApiError) {
|
||||
) (*integrationtypes.CloudIntegration, *model.ApiError) {
|
||||
// Insert
|
||||
if id == nil {
|
||||
temp := valuer.GenerateUUID().StringValue()
|
||||
@@ -181,7 +182,7 @@ func (r *cloudProviderAccountsSQLRepository) upsert(
|
||||
)
|
||||
}
|
||||
|
||||
integration := types.CloudIntegration{
|
||||
integration := integrationtypes.CloudIntegration{
|
||||
OrgID: orgId,
|
||||
Provider: provider,
|
||||
Identifiable: types.Identifiable{ID: valuer.MustNewUUID(*id)},
|
||||
|
||||
@@ -14,6 +14,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/integrationtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"golang.org/x/exp/maps"
|
||||
)
|
||||
@@ -52,7 +53,7 @@ func NewController(sqlStore sqlstore.SQLStore) (*Controller, error) {
|
||||
}
|
||||
|
||||
type ConnectedAccountsListResponse struct {
|
||||
Accounts []types.Account `json:"accounts"`
|
||||
Accounts []integrationtypes.Account `json:"accounts"`
|
||||
}
|
||||
|
||||
func (c *Controller) ListConnectedAccounts(ctx context.Context, orgId string, cloudProvider string) (
|
||||
@@ -67,7 +68,7 @@ func (c *Controller) ListConnectedAccounts(ctx context.Context, orgId string, cl
|
||||
return nil, model.WrapApiError(apiErr, "couldn't list cloud accounts")
|
||||
}
|
||||
|
||||
connectedAccounts := []types.Account{}
|
||||
connectedAccounts := []integrationtypes.Account{}
|
||||
for _, a := range accountRecords {
|
||||
connectedAccounts = append(connectedAccounts, a.Account())
|
||||
}
|
||||
@@ -81,7 +82,7 @@ type GenerateConnectionUrlRequest struct {
|
||||
// Optional. To be specified for updates.
|
||||
AccountId *string `json:"account_id,omitempty"`
|
||||
|
||||
AccountConfig types.AccountConfig `json:"account_config"`
|
||||
AccountConfig integrationtypes.AccountConfig `json:"account_config"`
|
||||
|
||||
AgentConfig SigNozAgentConfig `json:"agent_config"`
|
||||
}
|
||||
@@ -149,9 +150,9 @@ func (c *Controller) GenerateConnectionUrl(ctx context.Context, orgId string, cl
|
||||
}
|
||||
|
||||
type AccountStatusResponse struct {
|
||||
Id string `json:"id"`
|
||||
CloudAccountId *string `json:"cloud_account_id,omitempty"`
|
||||
Status types.AccountStatus `json:"status"`
|
||||
Id string `json:"id"`
|
||||
CloudAccountId *string `json:"cloud_account_id,omitempty"`
|
||||
Status integrationtypes.AccountStatus `json:"status"`
|
||||
}
|
||||
|
||||
func (c *Controller) GetAccountStatus(ctx context.Context, orgId string, cloudProvider string, accountId string) (
|
||||
@@ -217,7 +218,7 @@ func (c *Controller) CheckInAsAgent(ctx context.Context, orgId string, cloudProv
|
||||
))
|
||||
}
|
||||
|
||||
agentReport := types.AgentReport{
|
||||
agentReport := integrationtypes.AgentReport{
|
||||
TimestampMillis: time.Now().UnixMilli(),
|
||||
Data: req.Data,
|
||||
}
|
||||
@@ -286,10 +287,10 @@ func (c *Controller) CheckInAsAgent(ctx context.Context, orgId string, cloudProv
|
||||
}
|
||||
|
||||
type UpdateAccountConfigRequest struct {
|
||||
Config types.AccountConfig `json:"config"`
|
||||
Config integrationtypes.AccountConfig `json:"config"`
|
||||
}
|
||||
|
||||
func (c *Controller) UpdateAccountConfig(ctx context.Context, orgId string, cloudProvider string, accountId string, req UpdateAccountConfigRequest) (*types.Account, *model.ApiError) {
|
||||
func (c *Controller) UpdateAccountConfig(ctx context.Context, orgId string, cloudProvider string, accountId string, req UpdateAccountConfigRequest) (*integrationtypes.Account, *model.ApiError) {
|
||||
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
|
||||
return nil, apiErr
|
||||
}
|
||||
@@ -306,7 +307,7 @@ func (c *Controller) UpdateAccountConfig(ctx context.Context, orgId string, clou
|
||||
return &account, nil
|
||||
}
|
||||
|
||||
func (c *Controller) DisconnectAccount(ctx context.Context, orgId string, cloudProvider string, accountId string) (*types.CloudIntegration, *model.ApiError) {
|
||||
func (c *Controller) DisconnectAccount(ctx context.Context, orgId string, cloudProvider string, accountId string) (*integrationtypes.CloudIntegration, *model.ApiError) {
|
||||
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
|
||||
return nil, apiErr
|
||||
}
|
||||
@@ -346,7 +347,7 @@ func (c *Controller) ListServices(
|
||||
return nil, model.WrapApiError(apiErr, "couldn't list cloud services")
|
||||
}
|
||||
|
||||
svcConfigs := map[string]*types.CloudServiceConfig{}
|
||||
svcConfigs := map[string]*integrationtypes.CloudServiceConfig{}
|
||||
if cloudAccountId != nil {
|
||||
activeAccount, apiErr := c.accountsRepo.getConnectedCloudAccount(
|
||||
ctx, orgID, cloudProvider, *cloudAccountId,
|
||||
@@ -441,8 +442,8 @@ func (c *Controller) GetServiceDetails(
|
||||
}
|
||||
|
||||
type UpdateServiceConfigRequest struct {
|
||||
CloudAccountId string `json:"cloud_account_id"`
|
||||
Config types.CloudServiceConfig `json:"config"`
|
||||
CloudAccountId string `json:"cloud_account_id"`
|
||||
Config integrationtypes.CloudServiceConfig `json:"config"`
|
||||
}
|
||||
|
||||
func (u *UpdateServiceConfigRequest) Validate(def *services.Definition) error {
|
||||
@@ -460,8 +461,8 @@ func (u *UpdateServiceConfigRequest) Validate(def *services.Definition) error {
|
||||
}
|
||||
|
||||
type UpdateServiceConfigResponse struct {
|
||||
Id string `json:"id"`
|
||||
Config types.CloudServiceConfig `json:"config"`
|
||||
Id string `json:"id"`
|
||||
Config integrationtypes.CloudServiceConfig `json:"config"`
|
||||
}
|
||||
|
||||
func (c *Controller) UpdateServiceConfig(
|
||||
|
||||
@@ -3,20 +3,20 @@ package cloudintegrations
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/integrationtypes"
|
||||
)
|
||||
|
||||
type ServiceSummary struct {
|
||||
services.Metadata
|
||||
|
||||
Config *types.CloudServiceConfig `json:"config"`
|
||||
Config *integrationtypes.CloudServiceConfig `json:"config"`
|
||||
}
|
||||
|
||||
type ServiceDetails struct {
|
||||
services.Definition
|
||||
|
||||
Config *types.CloudServiceConfig `json:"config"`
|
||||
ConnectionStatus *ServiceConnectionStatus `json:"status,omitempty"`
|
||||
Config *integrationtypes.CloudServiceConfig `json:"config"`
|
||||
ConnectionStatus *ServiceConnectionStatus `json:"status,omitempty"`
|
||||
}
|
||||
|
||||
type AccountStatus struct {
|
||||
@@ -61,7 +61,7 @@ func NewCompiledCollectionStrategy(provider string) (*CompiledCollectionStrategy
|
||||
|
||||
// Helper for accumulating strategies for enabled services.
|
||||
func AddServiceStrategy(serviceType string, cs *CompiledCollectionStrategy,
|
||||
definitionStrat *services.CollectionStrategy, config *types.CloudServiceConfig) error {
|
||||
definitionStrat *services.CollectionStrategy, config *integrationtypes.CloudServiceConfig) error {
|
||||
if definitionStrat.Provider != cs.Provider {
|
||||
return errors.NewInternalf(CodeMismatchCloudProvider, "can't add %s service strategy to compiled strategy for %s",
|
||||
definitionStrat.Provider, cs.Provider)
|
||||
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/integrationtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
@@ -18,7 +19,7 @@ type ServiceConfigDatabase interface {
|
||||
orgID string,
|
||||
cloudAccountId string,
|
||||
serviceType string,
|
||||
) (*types.CloudServiceConfig, *model.ApiError)
|
||||
) (*integrationtypes.CloudServiceConfig, *model.ApiError)
|
||||
|
||||
upsert(
|
||||
ctx context.Context,
|
||||
@@ -26,15 +27,15 @@ type ServiceConfigDatabase interface {
|
||||
cloudProvider string,
|
||||
cloudAccountId string,
|
||||
serviceId string,
|
||||
config types.CloudServiceConfig,
|
||||
) (*types.CloudServiceConfig, *model.ApiError)
|
||||
config integrationtypes.CloudServiceConfig,
|
||||
) (*integrationtypes.CloudServiceConfig, *model.ApiError)
|
||||
|
||||
getAllForAccount(
|
||||
ctx context.Context,
|
||||
orgID string,
|
||||
cloudAccountId string,
|
||||
) (
|
||||
configsBySvcId map[string]*types.CloudServiceConfig,
|
||||
configsBySvcId map[string]*integrationtypes.CloudServiceConfig,
|
||||
apiErr *model.ApiError,
|
||||
)
|
||||
}
|
||||
@@ -56,9 +57,9 @@ func (r *serviceConfigSQLRepository) get(
|
||||
orgID string,
|
||||
cloudAccountId string,
|
||||
serviceType string,
|
||||
) (*types.CloudServiceConfig, *model.ApiError) {
|
||||
) (*integrationtypes.CloudServiceConfig, *model.ApiError) {
|
||||
|
||||
var result types.CloudIntegrationService
|
||||
var result integrationtypes.CloudIntegrationService
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&result).
|
||||
@@ -89,14 +90,14 @@ func (r *serviceConfigSQLRepository) upsert(
|
||||
cloudProvider string,
|
||||
cloudAccountId string,
|
||||
serviceId string,
|
||||
config types.CloudServiceConfig,
|
||||
) (*types.CloudServiceConfig, *model.ApiError) {
|
||||
config integrationtypes.CloudServiceConfig,
|
||||
) (*integrationtypes.CloudServiceConfig, *model.ApiError) {
|
||||
|
||||
// get cloud integration id from account id
|
||||
// if the account is not connected, we don't need to upsert the config
|
||||
var cloudIntegrationId string
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model((*types.CloudIntegration)(nil)).
|
||||
Model((*integrationtypes.CloudIntegration)(nil)).
|
||||
Column("id").
|
||||
Where("provider = ?", cloudProvider).
|
||||
Where("account_id = ?", cloudAccountId).
|
||||
@@ -111,7 +112,7 @@ func (r *serviceConfigSQLRepository) upsert(
|
||||
))
|
||||
}
|
||||
|
||||
serviceConfig := types.CloudIntegrationService{
|
||||
serviceConfig := integrationtypes.CloudIntegrationService{
|
||||
Identifiable: types.Identifiable{ID: valuer.GenerateUUID()},
|
||||
TimeAuditable: types.TimeAuditable{
|
||||
CreatedAt: time.Now(),
|
||||
@@ -139,8 +140,8 @@ func (r *serviceConfigSQLRepository) getAllForAccount(
|
||||
ctx context.Context,
|
||||
orgID string,
|
||||
cloudAccountId string,
|
||||
) (map[string]*types.CloudServiceConfig, *model.ApiError) {
|
||||
serviceConfigs := []types.CloudIntegrationService{}
|
||||
) (map[string]*integrationtypes.CloudServiceConfig, *model.ApiError) {
|
||||
serviceConfigs := []integrationtypes.CloudIntegrationService{}
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&serviceConfigs).
|
||||
@@ -154,7 +155,7 @@ func (r *serviceConfigSQLRepository) getAllForAccount(
|
||||
))
|
||||
}
|
||||
|
||||
result := map[string]*types.CloudServiceConfig{}
|
||||
result := map[string]*integrationtypes.CloudServiceConfig{}
|
||||
|
||||
for _, r := range serviceConfigs {
|
||||
result[r.Type] = &r.Config
|
||||
|
||||
@@ -572,6 +572,9 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
aH.LicensingAPI.Activate(rw, req)
|
||||
})).Methods(http.MethodGet)
|
||||
|
||||
// Export
|
||||
router.HandleFunc("/api/v1/export_raw_data", am.ViewAccess(aH.Signoz.Handlers.RawDataExport.ExportRawData)).Methods(http.MethodGet)
|
||||
|
||||
router.HandleFunc("/api/v1/span_percentile", am.ViewAccess(aH.Signoz.Handlers.SpanPercentile.GetSpanPercentileDetails)).Methods(http.MethodPost)
|
||||
|
||||
// Query Filter Analyzer api used to extract metric names and grouping columns from a query
|
||||
|
||||
@@ -11,6 +11,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/integrationtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
@@ -107,7 +108,7 @@ type IntegrationsListItem struct {
|
||||
|
||||
type Integration struct {
|
||||
IntegrationDetails
|
||||
Installation *types.InstalledIntegration `json:"installation"`
|
||||
Installation *integrationtypes.InstalledIntegration `json:"installation"`
|
||||
}
|
||||
|
||||
type Manager struct {
|
||||
@@ -223,7 +224,7 @@ func (m *Manager) InstallIntegration(
|
||||
ctx context.Context,
|
||||
orgId string,
|
||||
integrationId string,
|
||||
config types.InstalledIntegrationConfig,
|
||||
config integrationtypes.InstalledIntegrationConfig,
|
||||
) (*IntegrationsListItem, *model.ApiError) {
|
||||
integrationDetails, apiErr := m.getIntegrationDetails(ctx, integrationId)
|
||||
if apiErr != nil {
|
||||
@@ -429,7 +430,7 @@ func (m *Manager) getInstalledIntegration(
|
||||
ctx context.Context,
|
||||
orgId string,
|
||||
integrationId string,
|
||||
) (*types.InstalledIntegration, *model.ApiError) {
|
||||
) (*integrationtypes.InstalledIntegration, *model.ApiError) {
|
||||
iis, apiErr := m.installedIntegrationsRepo.get(
|
||||
ctx, orgId, []string{integrationId},
|
||||
)
|
||||
@@ -457,7 +458,7 @@ func (m *Manager) getInstalledIntegrations(
|
||||
return nil, apiErr
|
||||
}
|
||||
|
||||
installedTypes := utils.MapSlice(installations, func(i types.InstalledIntegration) string {
|
||||
installedTypes := utils.MapSlice(installations, func(i integrationtypes.InstalledIntegration) string {
|
||||
return i.Type
|
||||
})
|
||||
integrationDetails, apiErr := m.availableIntegrationsRepo.get(ctx, installedTypes)
|
||||
|
||||
@@ -4,22 +4,22 @@ import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/integrationtypes"
|
||||
)
|
||||
|
||||
type InstalledIntegrationsRepo interface {
|
||||
list(ctx context.Context, orgId string) ([]types.InstalledIntegration, *model.ApiError)
|
||||
list(ctx context.Context, orgId string) ([]integrationtypes.InstalledIntegration, *model.ApiError)
|
||||
|
||||
get(
|
||||
ctx context.Context, orgId string, integrationTypes []string,
|
||||
) (map[string]types.InstalledIntegration, *model.ApiError)
|
||||
) (map[string]integrationtypes.InstalledIntegration, *model.ApiError)
|
||||
|
||||
upsert(
|
||||
ctx context.Context,
|
||||
orgId string,
|
||||
integrationType string,
|
||||
config types.InstalledIntegrationConfig,
|
||||
) (*types.InstalledIntegration, *model.ApiError)
|
||||
config integrationtypes.InstalledIntegrationConfig,
|
||||
) (*integrationtypes.InstalledIntegration, *model.ApiError)
|
||||
|
||||
delete(ctx context.Context, orgId string, integrationType string) *model.ApiError
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/integrationtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
@@ -26,8 +27,8 @@ func NewInstalledIntegrationsSqliteRepo(store sqlstore.SQLStore) (
|
||||
func (r *InstalledIntegrationsSqliteRepo) list(
|
||||
ctx context.Context,
|
||||
orgId string,
|
||||
) ([]types.InstalledIntegration, *model.ApiError) {
|
||||
integrations := []types.InstalledIntegration{}
|
||||
) ([]integrationtypes.InstalledIntegration, *model.ApiError) {
|
||||
integrations := []integrationtypes.InstalledIntegration{}
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&integrations).
|
||||
@@ -44,8 +45,8 @@ func (r *InstalledIntegrationsSqliteRepo) list(
|
||||
|
||||
func (r *InstalledIntegrationsSqliteRepo) get(
|
||||
ctx context.Context, orgId string, integrationTypes []string,
|
||||
) (map[string]types.InstalledIntegration, *model.ApiError) {
|
||||
integrations := []types.InstalledIntegration{}
|
||||
) (map[string]integrationtypes.InstalledIntegration, *model.ApiError) {
|
||||
integrations := []integrationtypes.InstalledIntegration{}
|
||||
|
||||
typeValues := []interface{}{}
|
||||
for _, integrationType := range integrationTypes {
|
||||
@@ -62,7 +63,7 @@ func (r *InstalledIntegrationsSqliteRepo) get(
|
||||
))
|
||||
}
|
||||
|
||||
result := map[string]types.InstalledIntegration{}
|
||||
result := map[string]integrationtypes.InstalledIntegration{}
|
||||
for _, ii := range integrations {
|
||||
result[ii.Type] = ii
|
||||
}
|
||||
@@ -74,10 +75,10 @@ func (r *InstalledIntegrationsSqliteRepo) upsert(
|
||||
ctx context.Context,
|
||||
orgId string,
|
||||
integrationType string,
|
||||
config types.InstalledIntegrationConfig,
|
||||
) (*types.InstalledIntegration, *model.ApiError) {
|
||||
config integrationtypes.InstalledIntegrationConfig,
|
||||
) (*integrationtypes.InstalledIntegration, *model.ApiError) {
|
||||
|
||||
integration := types.InstalledIntegration{
|
||||
integration := integrationtypes.InstalledIntegration{
|
||||
Identifiable: types.Identifiable{
|
||||
ID: valuer.GenerateUUID(),
|
||||
},
|
||||
@@ -114,7 +115,7 @@ func (r *InstalledIntegrationsSqliteRepo) delete(
|
||||
ctx context.Context, orgId string, integrationType string,
|
||||
) *model.ApiError {
|
||||
_, dbErr := r.store.BunDB().NewDelete().
|
||||
Model(&types.InstalledIntegration{}).
|
||||
Model(&integrationtypes.InstalledIntegration{}).
|
||||
Where("type = ?", integrationType).
|
||||
Where("org_id = ?", orgId).
|
||||
Exec(ctx)
|
||||
|
||||
@@ -24,6 +24,8 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport"
|
||||
"github.com/SigNoz/signoz/pkg/modules/savedview"
|
||||
"github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview"
|
||||
"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
|
||||
"github.com/SigNoz/signoz/pkg/modules/serviceaccount/implserviceaccount"
|
||||
"github.com/SigNoz/signoz/pkg/modules/services"
|
||||
"github.com/SigNoz/signoz/pkg/modules/services/implservices"
|
||||
"github.com/SigNoz/signoz/pkg/modules/spanpercentile"
|
||||
@@ -36,22 +38,23 @@ import (
|
||||
)
|
||||
|
||||
type Handlers struct {
|
||||
SavedView savedview.Handler
|
||||
Apdex apdex.Handler
|
||||
Dashboard dashboard.Handler
|
||||
QuickFilter quickfilter.Handler
|
||||
TraceFunnel tracefunnel.Handler
|
||||
RawDataExport rawdataexport.Handler
|
||||
SpanPercentile spanpercentile.Handler
|
||||
Services services.Handler
|
||||
MetricsExplorer metricsexplorer.Handler
|
||||
Global global.Handler
|
||||
FlaggerHandler flagger.Handler
|
||||
GatewayHandler gateway.Handler
|
||||
Fields fields.Handler
|
||||
AuthzHandler authz.Handler
|
||||
ZeusHandler zeus.Handler
|
||||
QuerierHandler querier.Handler
|
||||
SavedView savedview.Handler
|
||||
Apdex apdex.Handler
|
||||
Dashboard dashboard.Handler
|
||||
QuickFilter quickfilter.Handler
|
||||
TraceFunnel tracefunnel.Handler
|
||||
RawDataExport rawdataexport.Handler
|
||||
SpanPercentile spanpercentile.Handler
|
||||
Services services.Handler
|
||||
MetricsExplorer metricsexplorer.Handler
|
||||
Global global.Handler
|
||||
FlaggerHandler flagger.Handler
|
||||
GatewayHandler gateway.Handler
|
||||
Fields fields.Handler
|
||||
AuthzHandler authz.Handler
|
||||
ZeusHandler zeus.Handler
|
||||
QuerierHandler querier.Handler
|
||||
ServiceAccountHandler serviceaccount.Handler
|
||||
}
|
||||
|
||||
func NewHandlers(
|
||||
@@ -68,21 +71,22 @@ func NewHandlers(
|
||||
zeusService zeus.Zeus,
|
||||
) Handlers {
|
||||
return Handlers{
|
||||
SavedView: implsavedview.NewHandler(modules.SavedView),
|
||||
Apdex: implapdex.NewHandler(modules.Apdex),
|
||||
Dashboard: impldashboard.NewHandler(modules.Dashboard, providerSettings),
|
||||
QuickFilter: implquickfilter.NewHandler(modules.QuickFilter),
|
||||
TraceFunnel: impltracefunnel.NewHandler(modules.TraceFunnel),
|
||||
RawDataExport: implrawdataexport.NewHandler(modules.RawDataExport),
|
||||
Services: implservices.NewHandler(modules.Services),
|
||||
MetricsExplorer: implmetricsexplorer.NewHandler(modules.MetricsExplorer),
|
||||
SpanPercentile: implspanpercentile.NewHandler(modules.SpanPercentile),
|
||||
Global: signozglobal.NewHandler(global),
|
||||
FlaggerHandler: flagger.NewHandler(flaggerService),
|
||||
GatewayHandler: gateway.NewHandler(gatewayService),
|
||||
Fields: implfields.NewHandler(providerSettings, telemetryMetadataStore),
|
||||
AuthzHandler: signozauthzapi.NewHandler(authz),
|
||||
ZeusHandler: zeus.NewHandler(zeusService, licensing),
|
||||
QuerierHandler: querierHandler,
|
||||
SavedView: implsavedview.NewHandler(modules.SavedView),
|
||||
Apdex: implapdex.NewHandler(modules.Apdex),
|
||||
Dashboard: impldashboard.NewHandler(modules.Dashboard, providerSettings),
|
||||
QuickFilter: implquickfilter.NewHandler(modules.QuickFilter),
|
||||
TraceFunnel: impltracefunnel.NewHandler(modules.TraceFunnel),
|
||||
RawDataExport: implrawdataexport.NewHandler(modules.RawDataExport),
|
||||
Services: implservices.NewHandler(modules.Services),
|
||||
MetricsExplorer: implmetricsexplorer.NewHandler(modules.MetricsExplorer),
|
||||
SpanPercentile: implspanpercentile.NewHandler(modules.SpanPercentile),
|
||||
Global: signozglobal.NewHandler(global),
|
||||
FlaggerHandler: flagger.NewHandler(flaggerService),
|
||||
GatewayHandler: gateway.NewHandler(gatewayService),
|
||||
Fields: implfields.NewHandler(providerSettings, telemetryMetadataStore),
|
||||
AuthzHandler: signozauthzapi.NewHandler(authz),
|
||||
ZeusHandler: zeus.NewHandler(zeusService, licensing),
|
||||
QuerierHandler: querierHandler,
|
||||
ServiceAccountHandler: implserviceaccount.NewHandler(modules.ServiceAccount),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -27,6 +27,8 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport"
|
||||
"github.com/SigNoz/signoz/pkg/modules/savedview"
|
||||
"github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview"
|
||||
"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
|
||||
"github.com/SigNoz/signoz/pkg/modules/serviceaccount/implserviceaccount"
|
||||
"github.com/SigNoz/signoz/pkg/modules/services"
|
||||
"github.com/SigNoz/signoz/pkg/modules/services/implservices"
|
||||
"github.com/SigNoz/signoz/pkg/modules/session"
|
||||
@@ -66,6 +68,7 @@ type Modules struct {
|
||||
SpanPercentile spanpercentile.Module
|
||||
MetricsExplorer metricsexplorer.Module
|
||||
Promote promote.Module
|
||||
ServiceAccount serviceaccount.Module
|
||||
}
|
||||
|
||||
func NewModules(
|
||||
@@ -110,5 +113,6 @@ func NewModules(
|
||||
Services: implservices.NewModule(querier, telemetryStore),
|
||||
MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, ruleStore, dashboard, providerSettings, config.MetricsExplorer),
|
||||
Promote: implpromote.NewModule(telemetryMetadataStore, telemetryStore),
|
||||
ServiceAccount: implserviceaccount.NewModule(implserviceaccount.NewStore(sqlstore), authz, emailing, providerSettings),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,7 +22,7 @@ import (
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/querier"
@@ -58,9 +58,9 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
struct{ gateway.Handler }{},
struct{ fields.Handler }{},
struct{ authz.Handler }{},
struct{ rawdataexport.Handler }{},
struct{ zeus.Handler }{},
struct{ querier.Handler }{},
struct{ serviceaccount.Handler }{},
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
if err != nil {
return nil, err

@@ -253,9 +253,9 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
handlers.GatewayHandler,
handlers.Fields,
handlers.AuthzHandler,
handlers.RawDataExport,
handlers.ZeusHandler,
handlers.QuerierHandler,
handlers.ServiceAccountHandler,
),
)
}

@@ -5,7 +5,6 @@ import (
"fmt"
"log/slog"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/querybuilder"
@@ -189,9 +188,7 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
}

// spatial_aggregation_cte
if frag, args, err := b.buildSpatialAggregationCTE(ctx, start, end, query, keys); err != nil {
return nil, err
} else if frag != "" {
if frag, args := b.buildSpatialAggregationCTE(ctx, start, end, query, keys); frag != "" {
cteFragments = append(cteFragments, frag)
cteArgs = append(cteArgs, args)
}
@@ -522,14 +519,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
_ uint64,
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
_ map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, []any, error) {
if query.Aggregations[0].SpaceAggregation.IsZero() {
return "", nil, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`]",
)
}
) (string, []any) {
sb := sqlbuilder.NewSelectBuilder()

sb.Select("ts")
@@ -546,7 +536,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)

q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
}

func (b *MetricQueryStatementBuilder) BuildFinalSelect(

@@ -122,7 +122,7 @@ func TestStatementBuilder(t *testing.T) {
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "test_histogram_percentile",
|
||||
name: "test_histogram_percentile1",
|
||||
requestType: qbtypes.RequestTypeTimeSeries,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Signal: telemetrytypes.SignalMetrics,
|
||||
@@ -132,6 +132,7 @@ func TestStatementBuilder(t *testing.T) {
|
||||
MetricName: "signoz_latency",
|
||||
Type: metrictypes.HistogramType,
|
||||
Temporality: metrictypes.Delta,
|
||||
TimeAggregation: metrictypes.TimeAggregationRate,
|
||||
SpaceAggregation: metrictypes.SpaceAggregationPercentile95,
|
||||
},
|
||||
},
|
||||
@@ -187,7 +188,7 @@ func TestStatementBuilder(t *testing.T) {
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "test_histogram_percentile",
|
||||
name: "test_histogram_percentile2",
|
||||
requestType: qbtypes.RequestTypeTimeSeries,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Signal: telemetrytypes.SignalMetrics,
|
||||
@@ -197,6 +198,7 @@ func TestStatementBuilder(t *testing.T) {
|
||||
MetricName: "http_server_duration_bucket",
|
||||
Type: metrictypes.HistogramType,
|
||||
Temporality: metrictypes.Cumulative,
|
||||
TimeAggregation: metrictypes.TimeAggregationRate,
|
||||
SpaceAggregation: metrictypes.SpaceAggregationPercentile95,
|
||||
},
|
||||
},
|
||||
@@ -211,7 +213,7 @@ func TestStatementBuilder(t *testing.T) {
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts ORDER BY `service.name`, ts",
|
||||
Args: []any{"http_server_duration_bucket", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "http_server_duration_bucket", uint64(1747947390000), uint64(1747983420000), 0},
|
||||
Args: []any{"http_server_duration_bucket", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "http_server_duration_bucket", uint64(1747947360000), uint64(1747983420000), 0},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
|
||||
578
pkg/types/cloudintegrationtypes/cloudintegration.go
Normal file
@@ -0,0 +1,578 @@
|
||||
// NOTE:
// - When the Account keyword is used in struct names, it refers to a cloud integration account; CloudIntegration refers to the DB schema.
// - When the Account Config keyword is used in struct names, it refers to configuration for cloud integration accounts.
// - When the Service keyword is used in struct names, it refers to a cloud integration service; CloudIntegrationService refers to the DB schema.
//   Here `service` means a service offered by a cloud provider, such as AWS S3 or Azure Blob Storage.
// - When the Service Config keyword is used in struct names, it refers to configuration for cloud integration services.
package cloudintegrationtypes
|
||||
|
||||
import (
|
||||
"database/sql/driver"
|
||||
"encoding/json"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/uptrace/bun"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
// CloudProviderType represents a supported cloud provider, such as aws or azure.
|
||||
type CloudProviderType struct{ valuer.String }
|
||||
|
||||
var (
|
||||
CloudProviderTypeAWS = CloudProviderType{valuer.NewString("aws")}
|
||||
CloudProviderTypeAzure = CloudProviderType{valuer.NewString("azure")}
|
||||
)
|
||||
|
||||
var ErrCodeCloudProviderInvalidInput = errors.MustNewCode("invalid_cloud_provider")
|
||||
|
||||
// NewCloudProvider returns a new CloudProviderType from a string. It validates the input and returns an error if the input is not valid.
|
||||
func NewCloudProvider(provider string) (CloudProviderType, error) {
|
||||
switch provider {
|
||||
case CloudProviderTypeAWS.StringValue():
|
||||
return CloudProviderTypeAWS, nil
|
||||
case CloudProviderTypeAzure.StringValue():
|
||||
return CloudProviderTypeAzure, nil
|
||||
default:
|
||||
return CloudProviderType{}, errors.NewInvalidInputf(ErrCodeCloudProviderInvalidInput, "invalid cloud provider: %s", provider)
|
||||
}
|
||||
}
|
||||
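A minimal usage sketch (illustrative, not part of this change; it assumes an external package importing cloudintegrationtypes) showing how callers would validate a provider string instead of comparing raw strings:

func exampleNewCloudProvider(raw string) error {
	provider, err := cloudintegrationtypes.NewCloudProvider(raw) // e.g. "aws" or "azure"
	if err != nil {
		return err // carries ErrCodeCloudProviderInvalidInput for anything else, e.g. "gcp"
	}
	_ = provider // CloudProviderTypeAWS or CloudProviderTypeAzure
	return nil
}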
|
||||
var (
|
||||
AWSIntegrationUserEmail = valuer.MustNewEmail("aws-integration@signoz.io")
|
||||
AzureIntegrationUserEmail = valuer.MustNewEmail("azure-integration@signoz.io")
|
||||
)
|
||||
|
||||
// CloudIntegrationUserEmails is the list of valid emails for Cloud One Click integrations.
|
||||
// This is used for validation and restrictions in different contexts across the codebase.
|
||||
var CloudIntegrationUserEmails = []valuer.Email{
|
||||
AWSIntegrationUserEmail,
|
||||
AzureIntegrationUserEmail,
|
||||
}
|
||||
|
||||
func IsCloudIntegrationDashboardUuid(dashboardUuid string) bool {
|
||||
parts := strings.SplitN(dashboardUuid, "--", 4)
|
||||
if len(parts) != 4 {
|
||||
return false
|
||||
}
|
||||
|
||||
return parts[0] == "cloud-integration"
|
||||
}
|
||||
|
||||
// GetCloudProviderFromDashboardID returns the cloud provider from the dashboard id, if it is a cloud integration dashboard id.
// It returns an error if the dashboard id has an invalid format or names an invalid cloud provider.
|
||||
func GetCloudProviderFromDashboardID(dashboardUuid string) (CloudProviderType, error) {
|
||||
parts := strings.SplitN(dashboardUuid, "--", 4)
|
||||
if len(parts) != 4 {
|
||||
return CloudProviderType{}, errors.NewInvalidInputf(ErrCodeCloudProviderInvalidInput, "invalid dashboard uuid: %s", dashboardUuid)
|
||||
}
|
||||
|
||||
providerStr := parts[1]
|
||||
|
||||
cloudProvider, err := NewCloudProvider(providerStr)
|
||||
if err != nil {
|
||||
return CloudProviderType{}, err
|
||||
}
|
||||
|
||||
return cloudProvider, nil
|
||||
}
|
||||
|
||||
// Generic utility functions for JSON serialization/deserialization
|
||||
// These help return the right errors from a common place and avoid repeating the same code in multiple places.
|
||||
// UnmarshalJSON is a generic function to unmarshal JSON data into any type
|
||||
func UnmarshalJSON[T any](src []byte, target *T) error {
|
||||
err := json.Unmarshal(src, target)
|
||||
if err != nil {
|
||||
return errors.WrapInternalf(
|
||||
err, errors.CodeInternal, "couldn't deserialize JSON",
|
||||
)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// MarshalJSON is a generic function to marshal any type to JSON
|
||||
func MarshalJSON[T any](source *T) ([]byte, error) {
|
||||
if source == nil {
|
||||
return nil, errors.NewInternalf(errors.CodeInternal, "source is nil")
|
||||
}
|
||||
|
||||
serialized, err := json.Marshal(source)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(
|
||||
err, errors.CodeInternal, "couldn't serialize to JSON",
|
||||
)
|
||||
}
|
||||
return serialized, nil
|
||||
}
|
||||
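A hedged sketch of the generic JSON helpers in use, round-tripping the AWSAccountConfig type defined later in this file (the example function itself is illustrative only):

func exampleConfigRoundTrip() error {
	cfg := cloudintegrationtypes.AWSAccountConfig{EnabledRegions: []string{"us-east-1"}}

	raw, err := cloudintegrationtypes.MarshalJSON(&cfg) // {"regions":["us-east-1"]}
	if err != nil {
		return err
	}

	var decoded cloudintegrationtypes.AWSAccountConfig
	if err := cloudintegrationtypes.UnmarshalJSON(raw, &decoded); err != nil {
		return err
	}
	// decoded.EnabledRegions now equals []string{"us-east-1"}
	return nil
}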
|
||||
// GettableConnectedAccountsList is the response for listing connected accounts for a cloud provider.
|
||||
type GettableConnectedAccountsList struct {
|
||||
Accounts []*Account `json:"accounts"`
|
||||
}
|
||||
|
||||
// SigNozAgentConfig represents parameters required for agent deployment in cloud provider accounts
|
||||
// These parameters are passed during agent deployment; how they are passed may differ per cloud provider, but the purpose is the same.
|
||||
type SigNozAgentConfig struct {
|
||||
Region string `json:"region,omitempty"` // AWS-specific: The region in which SigNoz agent should be installed
|
||||
|
||||
IngestionUrl string `json:"ingestion_url"`
|
||||
IngestionKey string `json:"ingestion_key"`
|
||||
SigNozAPIUrl string `json:"signoz_api_url"`
|
||||
SigNozAPIKey string `json:"signoz_api_key"`
|
||||
|
||||
Version string `json:"version,omitempty"`
|
||||
}
|
||||
|
||||
// PostableConnectionArtifact represents the request body for the generate connection artifact API.
// Data holds the raw request body bytes, since each cloud provider has a different request body structure and generics are of little help here.
|
||||
// Artifact is a generic name for different types of connection methods like connection URL for AWS, connection command for Azure etc.
|
||||
type PostableConnectionArtifact struct {
|
||||
OrgID string
|
||||
Data []byte // either PostableAWSConnectionUrl or PostableAzureConnectionCommand
|
||||
}
|
||||
|
||||
// PostableAWSConnectionUrl is request body for AWS connection artifact API
|
||||
type PostableAWSConnectionUrl struct {
|
||||
AgentConfig *SigNozAgentConfig `json:"agent_config"`
|
||||
AccountConfig *AWSAccountConfig `json:"account_config"`
|
||||
}
|
||||
|
||||
// PostableAzureConnectionCommand is request body for Azure connection artifact API
|
||||
type PostableAzureConnectionCommand struct {
|
||||
AgentConfig *SigNozAgentConfig `json:"agent_config"`
|
||||
AccountConfig *AzureAccountConfig `json:"account_config"`
|
||||
}
|
||||
|
||||
// GettableAzureConnectionArtifact is Azure specific connection artifact which contains connection commands for agent deployment
|
||||
type GettableAzureConnectionArtifact struct {
|
||||
AzureShellConnectionCommand string `json:"az_shell_connection_command"`
|
||||
AzureCliConnectionCommand string `json:"az_cli_connection_command"`
|
||||
}
|
||||
|
||||
// GettableAWSConnectionUrl is AWS specific connection artifact which contains connection url for agent deployment
|
||||
type GettableAWSConnectionUrl struct {
|
||||
AccountId string `json:"account_id"`
|
||||
ConnectionUrl string `json:"connection_url"`
|
||||
}
|
||||
|
||||
// GettableAzureConnectionCommand is Azure specific connection artifact which contains connection commands for agent deployment
|
||||
type GettableAzureConnectionCommand struct {
|
||||
AccountId string `json:"account_id"`
|
||||
AzureShellConnectionCommand string `json:"az_shell_connection_command"`
|
||||
AzureCliConnectionCommand string `json:"az_cli_connection_command"`
|
||||
}
|
||||
|
||||
// GettableAccountStatus is cloud integration account status response
|
||||
type GettableAccountStatus struct {
|
||||
Id string `json:"id"`
|
||||
CloudAccountId *string `json:"cloud_account_id,omitempty"`
|
||||
Status AccountStatus `json:"status"`
|
||||
}
|
||||
|
||||
// PostableAgentCheckInPayload is the request body for the agent check-in API.
// The agent uses it to send heartbeats.
|
||||
type PostableAgentCheckInPayload struct {
|
||||
ID string `json:"account_id"`
|
||||
AccountID string `json:"cloud_account_id"`
|
||||
// Arbitrary cloud specific Agent data
|
||||
Data map[string]any `json:"data,omitempty"`
|
||||
OrgID string `json:"-"`
|
||||
}
|
||||
|
||||
// AWSAgentIntegrationConfig is used by agent for deploying infra to send telemetry to SigNoz
|
||||
type AWSAgentIntegrationConfig struct {
|
||||
EnabledRegions []string `json:"enabled_regions"`
|
||||
TelemetryCollectionStrategy *AWSCollectionStrategy `json:"telemetry,omitempty"`
|
||||
}
|
||||
|
||||
// AzureAgentIntegrationConfig is used by agent for deploying infra to send telemetry to SigNoz
|
||||
type AzureAgentIntegrationConfig struct {
|
||||
DeploymentRegion string `json:"deployment_region"` // will not be changed once set
|
||||
EnabledResourceGroups []string `json:"resource_groups"`
|
||||
// TelemetryCollectionStrategy is map of service to telemetry config
|
||||
TelemetryCollectionStrategy map[string]*AzureCollectionStrategy `json:"telemetry,omitempty"`
|
||||
}
|
||||
|
||||
// GettableAgentCheckInRes is the generic response from the agent check-in API.
// AgentConfigT is either AWSAgentIntegrationConfig or AzureAgentIntegrationConfig; the agent uses this config to deploy infra and send telemetry to SigNoz.
|
||||
type GettableAgentCheckInRes[AgentConfigT any] struct {
|
||||
AccountId string `json:"account_id"`
|
||||
CloudAccountId string `json:"cloud_account_id"`
|
||||
RemovedAt *time.Time `json:"removed_at"`
|
||||
IntegrationConfig AgentConfigT `json:"integration_config"`
|
||||
}
|
||||
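An illustrative sketch (not part of this change) of how the generic check-in response is parameterized per provider; the literal values below are hypothetical:

func exampleAWSCheckInResponse() cloudintegrationtypes.GettableAgentCheckInRes[cloudintegrationtypes.AWSAgentIntegrationConfig] {
	return cloudintegrationtypes.GettableAgentCheckInRes[cloudintegrationtypes.AWSAgentIntegrationConfig]{
		AccountId:      "0198a2b3-...", // SigNoz-side account id (hypothetical)
		CloudAccountId: "123456789012", // AWS account id (hypothetical)
		IntegrationConfig: cloudintegrationtypes.AWSAgentIntegrationConfig{
			EnabledRegions: []string{"us-east-1"},
		},
	}
}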
|
||||
// UpdatableServiceConfig is a generic request body for updating a cloud integration service's configuration.
|
||||
type UpdatableServiceConfig[ServiceConfigT any] struct {
|
||||
CloudAccountId string `json:"cloud_account_id"`
|
||||
Config ServiceConfigT `json:"config"`
|
||||
}
|
||||
|
||||
// ServiceConfigTyped is a generic interface for cloud integration service's configuration
|
||||
// this is generic interface to define helper functions for CloudIntegrationService.Config field.
|
||||
type ServiceConfigTyped[definition Definition] interface {
|
||||
Validate(def definition) error
|
||||
IsMetricsEnabled() bool
|
||||
IsLogsEnabled() bool
|
||||
}
|
||||
|
||||
type AWSServiceConfig struct {
|
||||
Logs *AWSServiceLogsConfig `json:"logs,omitempty"`
|
||||
Metrics *AWSServiceMetricsConfig `json:"metrics,omitempty"`
|
||||
}
|
||||
|
||||
type AWSServiceLogsConfig struct {
|
||||
Enabled bool `json:"enabled"`
|
||||
S3Buckets map[string][]string `json:"s3_buckets,omitempty"`
|
||||
}
|
||||
|
||||
type AWSServiceMetricsConfig struct {
|
||||
Enabled bool `json:"enabled"`
|
||||
}
|
||||
|
||||
// IsMetricsEnabled returns true if metrics collection is configured and enabled
|
||||
func (a *AWSServiceConfig) IsMetricsEnabled() bool {
|
||||
return a.Metrics != nil && a.Metrics.Enabled
|
||||
}
|
||||
|
||||
// IsLogsEnabled returns true if logs collection is configured and enabled
|
||||
func (a *AWSServiceConfig) IsLogsEnabled() bool {
|
||||
return a.Logs != nil && a.Logs.Enabled
|
||||
}
|
||||
|
||||
type AzureServiceConfig struct {
|
||||
Logs []*AzureServiceLogsConfig `json:"logs,omitempty"`
|
||||
Metrics []*AzureServiceMetricsConfig `json:"metrics,omitempty"`
|
||||
}
|
||||
|
||||
// AzureServiceLogsConfig is Azure specific service config for logs
|
||||
type AzureServiceLogsConfig struct {
|
||||
Enabled bool `json:"enabled"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
// AzureServiceMetricsConfig is Azure specific service config for metrics
|
||||
type AzureServiceMetricsConfig struct {
|
||||
Enabled bool `json:"enabled"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
// IsMetricsEnabled returns true if any metric is configured and enabled
|
||||
func (a *AzureServiceConfig) IsMetricsEnabled() bool {
|
||||
if a.Metrics == nil {
|
||||
return false
|
||||
}
|
||||
for _, m := range a.Metrics {
|
||||
if m.Enabled {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// IsLogsEnabled returns true if any log is configured and enabled
|
||||
func (a *AzureServiceConfig) IsLogsEnabled() bool {
|
||||
if a.Logs == nil {
|
||||
return false
|
||||
}
|
||||
for _, l := range a.Logs {
|
||||
if l.Enabled {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
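A small illustrative sketch of the nil-safe enabled checks on both config shapes; the Azure category names used here are hypothetical:

func exampleEnabledChecks() {
	awsCfg := &cloudintegrationtypes.AWSServiceConfig{
		Metrics: &cloudintegrationtypes.AWSServiceMetricsConfig{Enabled: true},
	}
	_ = awsCfg.IsMetricsEnabled() // true
	_ = awsCfg.IsLogsEnabled()    // false: Logs is nil

	azureCfg := &cloudintegrationtypes.AzureServiceConfig{
		Logs: []*cloudintegrationtypes.AzureServiceLogsConfig{
			{Name: "AuditLogs", Enabled: false},
			{Name: "RequestLogs", Enabled: true},
		},
	}
	_ = azureCfg.IsLogsEnabled() // true: at least one entry is enabled
}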
|
||||
func (a *AWSServiceConfig) Validate(def *AWSDefinition) error {
|
||||
if def.Id != S3Sync.String() && a.Logs != nil && a.Logs.S3Buckets != nil {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "s3 buckets can only be added to service-type[%s]", S3Sync)
|
||||
} else if def.Id == S3Sync.String() && a.Logs != nil && a.Logs.S3Buckets != nil {
|
||||
for region := range a.Logs.S3Buckets {
|
||||
if _, found := ValidAWSRegions[region]; !found {
|
||||
return errors.NewInvalidInputf(CodeInvalidCloudRegion, "invalid cloud region: %s", region)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
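A hedged sketch of the rule enforced above: S3 bucket lists are only accepted for the s3sync service, and their regions must appear in ValidAWSRegions (defined in regions.go in this change). The service id "rds" and the bucket name are hypothetical:

func exampleValidateAWSServiceConfig() error {
	cfg := &cloudintegrationtypes.AWSServiceConfig{
		Logs: &cloudintegrationtypes.AWSServiceLogsConfig{
			Enabled:   true,
			S3Buckets: map[string][]string{"us-east-1": {"my-logs-bucket"}},
		},
	}

	s3SyncDef := &cloudintegrationtypes.AWSDefinition{
		DefinitionMetadata: cloudintegrationtypes.DefinitionMetadata{Id: cloudintegrationtypes.S3Sync.String()},
	}
	if err := cfg.Validate(s3SyncDef); err != nil { // nil: valid region, correct service type
		return err
	}

	otherDef := &cloudintegrationtypes.AWSDefinition{
		DefinitionMetadata: cloudintegrationtypes.DefinitionMetadata{Id: "rds"},
	}
	return cfg.Validate(otherDef) // invalid input: s3 buckets are only allowed for s3sync
}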
|
||||
func (a *AzureServiceConfig) Validate(def *AzureDefinition) error {
|
||||
logsMap := make(map[string]bool)
|
||||
metricsMap := make(map[string]bool)
|
||||
|
||||
if def.Strategy != nil && def.Strategy.Logs != nil {
|
||||
for _, log := range def.Strategy.Logs {
|
||||
logsMap[log.Name] = true
|
||||
}
|
||||
}
|
||||
|
||||
if def.Strategy != nil && def.Strategy.Metrics != nil {
|
||||
for _, metric := range def.Strategy.Metrics {
|
||||
metricsMap[metric.Name] = true
|
||||
}
|
||||
}
|
||||
|
||||
for _, log := range a.Logs {
|
||||
if _, found := logsMap[log.Name]; !found {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid log name: %s", log.Name)
|
||||
}
|
||||
}
|
||||
|
||||
for _, metric := range a.Metrics {
|
||||
if _, found := metricsMap[metric.Name]; !found {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid metric name: %s", metric.Name)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// UpdatableServiceConfigRes is response for UpdateServiceConfig API
|
||||
// TODO: find a better way to name this
|
||||
type UpdatableServiceConfigRes struct {
|
||||
ServiceId string `json:"id"`
|
||||
Config any `json:"config"`
|
||||
}
|
||||
|
||||
// UpdatableAccountConfigTyped is a generic struct for updating cloud integration account config used in UpdateAccountConfig API
|
||||
type UpdatableAccountConfigTyped[AccountConfigT any] struct {
|
||||
Config *AccountConfigT `json:"config"`
|
||||
}
|
||||
|
||||
type (
|
||||
UpdatableAWSAccountConfig = UpdatableAccountConfigTyped[AWSAccountConfig]
|
||||
UpdatableAzureAccountConfig = UpdatableAccountConfigTyped[AzureAccountConfig]
|
||||
)
|
||||
|
||||
// AWSAccountConfig is the configuration for AWS cloud integration account
|
||||
type AWSAccountConfig struct {
|
||||
EnabledRegions []string `json:"regions"`
|
||||
}
|
||||
|
||||
// AzureAccountConfig is the configuration for Azure cloud integration account
|
||||
type AzureAccountConfig struct {
|
||||
DeploymentRegion string `json:"deployment_region,omitempty"`
|
||||
EnabledResourceGroups []string `json:"resource_groups,omitempty"`
|
||||
}
|
||||
|
||||
// GettableServices is a generic struct for listing services of a cloud integration account used in ListServices API
|
||||
type GettableServices[ServiceSummaryT any] struct {
|
||||
Services []ServiceSummaryT `json:"services"`
|
||||
}
|
||||
|
||||
type (
|
||||
GettableAWSServices = GettableServices[AWSServiceSummary]
|
||||
GettableAzureServices = GettableServices[AzureServiceSummary]
|
||||
)
|
||||
|
||||
// GetServiceDetailsReq is a req struct for getting service definition details
|
||||
type GetServiceDetailsReq struct {
|
||||
OrgID valuer.UUID
|
||||
ServiceId string
|
||||
CloudAccountID *string
|
||||
}
|
||||
|
||||
// ServiceSummary is a generic struct for service summary used in ListServices API
|
||||
type ServiceSummary[ServiceConfigT any] struct {
|
||||
DefinitionMetadata
|
||||
Config *ServiceConfigT `json:"config"`
|
||||
}
|
||||
|
||||
type (
|
||||
AWSServiceSummary = ServiceSummary[AWSServiceConfig]
|
||||
AzureServiceSummary = ServiceSummary[AzureServiceConfig]
|
||||
)
|
||||
|
||||
// GettableServiceDetails is a generic struct for service details used in GetServiceDetails API
|
||||
type GettableServiceDetails[DefinitionT any, ServiceConfigT any] struct {
|
||||
Definition DefinitionT `json:",inline"`
|
||||
Config ServiceConfigT `json:"config"`
|
||||
ConnectionStatus *ServiceConnectionStatus `json:"status,omitempty"`
|
||||
}
|
||||
|
||||
type (
|
||||
GettableAWSServiceDetails = GettableServiceDetails[AWSDefinition, *AWSServiceConfig]
|
||||
GettableAzureServiceDetails = GettableServiceDetails[AzureDefinition, *AzureServiceConfig]
|
||||
)
|
||||
|
||||
// ServiceConnectionStatus represents the integration connection status for a particular service.
// It is derived from ingested data: the connection status is determined by whether data has been ingested.
// This is a composite struct covering both metrics and logs.
|
||||
type ServiceConnectionStatus struct {
|
||||
Logs []*SignalConnectionStatus `json:"logs"`
|
||||
Metrics []*SignalConnectionStatus `json:"metrics"`
|
||||
}
|
||||
|
||||
// SignalConnectionStatus represents connection status for a particular signal type (logs or metrics) for a service
|
||||
// this struct is used in API responses for clients to show relevant information about the connection status.
|
||||
type SignalConnectionStatus struct {
|
||||
CategoryID string `json:"category"`
|
||||
CategoryDisplayName string `json:"category_display_name"`
|
||||
LastReceivedTsMillis int64 `json:"last_received_ts_ms"` // epoch milliseconds
|
||||
LastReceivedFrom string `json:"last_received_from"` // resource identifier
|
||||
}
|
||||
|
||||
// GettableCloudIntegrationConnectionParams is response for connection params API
|
||||
type GettableCloudIntegrationConnectionParams struct {
|
||||
IngestionUrl string `json:"ingestion_url,omitempty"`
|
||||
IngestionKey string `json:"ingestion_key,omitempty"`
|
||||
SigNozAPIUrl string `json:"signoz_api_url,omitempty"`
|
||||
SigNozAPIKey string `json:"signoz_api_key,omitempty"`
|
||||
}
|
||||
|
||||
// GettableIngestionKey is a struct for ingestion key returned from gateway
|
||||
type GettableIngestionKey struct {
|
||||
Name string `json:"name"`
|
||||
Value string `json:"value"`
|
||||
// other attributes from gateway response not included here since they are not being used.
|
||||
}
|
||||
|
||||
// GettableIngestionKeysSearch is a struct for response of ingestion keys search API on gateway
|
||||
type GettableIngestionKeysSearch struct {
|
||||
Status string `json:"status"`
|
||||
Data []GettableIngestionKey `json:"data"`
|
||||
Error string `json:"error"`
|
||||
}
|
||||
|
||||
// GettableCreateIngestionKey is a struct for response of create ingestion key API on gateway
|
||||
type GettableCreateIngestionKey struct {
|
||||
Status string `json:"status"`
|
||||
Data GettableIngestionKey `json:"data"`
|
||||
Error string `json:"error"`
|
||||
}
|
||||
|
||||
// GettableDeployment is response struct for deployment details fetched from Zeus
|
||||
type GettableDeployment struct {
|
||||
Name string `json:"name"`
|
||||
ClusterInfo struct {
|
||||
Region struct {
|
||||
DNS string `json:"dns"`
|
||||
} `json:"region"`
|
||||
} `json:"cluster"`
|
||||
}
|
||||
|
||||
// --------------------------------------------------------------------------
|
||||
// DATABASE TYPES
|
||||
// --------------------------------------------------------------------------
|
||||
|
||||
// --------------------------------------------------------------------------
|
||||
// Cloud integration uses the cloud_integration table
|
||||
// and the cloud_integration_service table
|
||||
// --------------------------------------------------------------------------
|
||||
|
||||
type CloudIntegration struct {
|
||||
bun.BaseModel `bun:"table:cloud_integration"`
|
||||
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
Provider string `json:"provider" bun:"provider,type:text,unique:provider_id"`
|
||||
Config string `json:"config" bun:"config,type:text"` // json serialized config
|
||||
AccountID *string `json:"account_id" bun:"account_id,type:text"`
|
||||
LastAgentReport *AgentReport `json:"last_agent_report" bun:"last_agent_report,type:text"`
|
||||
RemovedAt *time.Time `json:"removed_at" bun:"removed_at,type:timestamp,nullzero"`
|
||||
OrgID string `bun:"org_id,type:text,unique:provider_id"`
|
||||
}
|
||||
|
||||
// Account represents a cloud integration account, this is used for business logic and API responses.
|
||||
type Account struct {
|
||||
Id string `json:"id"`
|
||||
CloudAccountId string `json:"cloud_account_id"`
|
||||
Config any `json:"config"` // AWSAccountConfig or AzureAccountConfig
|
||||
Status AccountStatus `json:"status"`
|
||||
}
|
||||
|
||||
// AccountStatus is generic struct for cloud integration account status
|
||||
type AccountStatus struct {
|
||||
Integration AccountIntegrationStatus `json:"integration"`
|
||||
}
|
||||
|
||||
// AccountIntegrationStatus stores heartbeat information from agent check in
|
||||
type AccountIntegrationStatus struct {
|
||||
LastHeartbeatTsMillis *int64 `json:"last_heartbeat_ts_ms"`
|
||||
}
|
||||
|
||||
func (a *CloudIntegration) Status() AccountStatus {
|
||||
status := AccountStatus{}
|
||||
if a.LastAgentReport != nil {
|
||||
lastHeartbeat := a.LastAgentReport.TimestampMillis
|
||||
status.Integration.LastHeartbeatTsMillis = &lastHeartbeat
|
||||
}
|
||||
return status
|
||||
}
|
||||
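A minimal illustrative sketch of how Status surfaces the last agent heartbeat from a stored row (the timestamp is hypothetical):

func exampleStatusFromRow() *int64 {
	row := &cloudintegrationtypes.CloudIntegration{
		LastAgentReport: &cloudintegrationtypes.AgentReport{TimestampMillis: 1747936800000},
	}
	status := row.Status()
	return status.Integration.LastHeartbeatTsMillis // points at 1747936800000
}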
|
||||
func (a *CloudIntegration) Account(cloudProvider CloudProviderType) *Account {
|
||||
ca := &Account{Id: a.ID.StringValue(), Status: a.Status()}
|
||||
|
||||
if a.AccountID != nil {
|
||||
ca.CloudAccountId = *a.AccountID
|
||||
}
|
||||
|
||||
ca.Config = map[string]interface{}{}
|
||||
|
||||
if len(a.Config) < 1 {
|
||||
return ca
|
||||
}
|
||||
|
||||
switch cloudProvider {
|
||||
case CloudProviderTypeAWS:
|
||||
config := new(AWSAccountConfig)
|
||||
_ = UnmarshalJSON([]byte(a.Config), config)
|
||||
ca.Config = config
|
||||
case CloudProviderTypeAzure:
|
||||
config := new(AzureAccountConfig)
|
||||
_ = UnmarshalJSON([]byte(a.Config), config)
|
||||
ca.Config = config
|
||||
default:
|
||||
}
|
||||
|
||||
return ca
|
||||
}
|
||||
|
||||
type AgentReport struct {
|
||||
TimestampMillis int64 `json:"timestamp_millis"`
|
||||
Data map[string]any `json:"data"`
|
||||
}
|
||||
|
||||
// Scan scans data from db
|
||||
func (r *AgentReport) Scan(src any) error {
|
||||
var data []byte
|
||||
switch v := src.(type) {
|
||||
case []byte:
|
||||
data = v
|
||||
case string:
|
||||
data = []byte(v)
|
||||
default:
|
||||
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
|
||||
}
|
||||
|
||||
return json.Unmarshal(data, r)
|
||||
}
|
||||
|
||||
// Value serializes data to bytes for db insertion
|
||||
func (r *AgentReport) Value() (driver.Value, error) {
|
||||
if r == nil {
|
||||
return nil, errors.NewInternalf(errors.CodeInternal, "agent report is nil")
|
||||
}
|
||||
|
||||
serialized, err := json.Marshal(r)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(
|
||||
err, errors.CodeInternal, "couldn't serialize agent report to JSON",
|
||||
)
|
||||
}
|
||||
return serialized, nil
|
||||
}
|
||||
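An illustrative round trip through the driver.Valuer / sql.Scanner pair above, which is how the report is persisted on the cloud_integration row (the payload key is hypothetical):

func exampleAgentReportRoundTrip() error {
	report := &cloudintegrationtypes.AgentReport{
		TimestampMillis: 1747936800000,
		Data:            map[string]any{"agent_version": "0.0.1"},
	}

	stored, err := report.Value() // JSON bytes handed to the database driver
	if err != nil {
		return err
	}

	var loaded cloudintegrationtypes.AgentReport
	return loaded.Scan(stored) // Scan accepts []byte or string
}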
|
||||
type CloudIntegrationService struct {
|
||||
bun.BaseModel `bun:"table:cloud_integration_service,alias:cis"`
|
||||
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
Type string `bun:"type,type:text,notnull,unique:cloud_integration_id_type"`
|
||||
Config string `bun:"config,type:text"` // json serialized config
|
||||
CloudIntegrationID string `bun:"cloud_integration_id,type:text,notnull,unique:cloud_integration_id_type,references:cloud_integrations(id),on_delete:cascade"`
|
||||
}
|
||||
103
pkg/types/cloudintegrationtypes/regions.go
Normal file
@@ -0,0 +1,103 @@
|
||||
package cloudintegrationtypes
|
||||
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
)
|
||||
|
||||
var (
|
||||
CodeInvalidCloudRegion = errors.MustNewCode("invalid_cloud_region")
|
||||
CodeMismatchCloudProvider = errors.MustNewCode("cloud_provider_mismatch")
|
||||
)
|
||||
|
||||
// List of all valid cloud regions on Amazon Web Services
|
||||
var ValidAWSRegions = map[string]bool{
|
||||
"af-south-1": true, // Africa (Cape Town).
|
||||
"ap-east-1": true, // Asia Pacific (Hong Kong).
|
||||
"ap-northeast-1": true, // Asia Pacific (Tokyo).
|
||||
"ap-northeast-2": true, // Asia Pacific (Seoul).
|
||||
"ap-northeast-3": true, // Asia Pacific (Osaka).
|
||||
"ap-south-1": true, // Asia Pacific (Mumbai).
|
||||
"ap-south-2": true, // Asia Pacific (Hyderabad).
|
||||
"ap-southeast-1": true, // Asia Pacific (Singapore).
|
||||
"ap-southeast-2": true, // Asia Pacific (Sydney).
|
||||
"ap-southeast-3": true, // Asia Pacific (Jakarta).
|
||||
"ap-southeast-4": true, // Asia Pacific (Melbourne).
|
||||
"ca-central-1": true, // Canada (Central).
|
||||
"ca-west-1": true, // Canada West (Calgary).
|
||||
"eu-central-1": true, // Europe (Frankfurt).
|
||||
"eu-central-2": true, // Europe (Zurich).
|
||||
"eu-north-1": true, // Europe (Stockholm).
|
||||
"eu-south-1": true, // Europe (Milan).
|
||||
"eu-south-2": true, // Europe (Spain).
|
||||
"eu-west-1": true, // Europe (Ireland).
|
||||
"eu-west-2": true, // Europe (London).
|
||||
"eu-west-3": true, // Europe (Paris).
|
||||
"il-central-1": true, // Israel (Tel Aviv).
|
||||
"me-central-1": true, // Middle East (UAE).
|
||||
"me-south-1": true, // Middle East (Bahrain).
|
||||
"sa-east-1": true, // South America (Sao Paulo).
|
||||
"us-east-1": true, // US East (N. Virginia).
|
||||
"us-east-2": true, // US East (Ohio).
|
||||
"us-west-1": true, // US West (N. California).
|
||||
"us-west-2": true, // US West (Oregon).
|
||||
}
|
||||
|
||||
// List of all valid cloud regions for Microsoft Azure
|
||||
var ValidAzureRegions = map[string]bool{
|
||||
"australiacentral": true, // Australia Central
|
||||
"australiacentral2": true, // Australia Central 2
|
||||
"australiaeast": true, // Australia East
|
||||
"australiasoutheast": true, // Australia Southeast
|
||||
"austriaeast": true, // Austria East
|
||||
"belgiumcentral": true, // Belgium Central
|
||||
"brazilsouth": true, // Brazil South
|
||||
"brazilsoutheast": true, // Brazil Southeast
|
||||
"canadacentral": true, // Canada Central
|
||||
"canadaeast": true, // Canada East
|
||||
"centralindia": true, // Central India
|
||||
"centralus": true, // Central US
|
||||
"chilecentral": true, // Chile Central
|
||||
"denmarkeast": true, // Denmark East
|
||||
"eastasia": true, // East Asia
|
||||
"eastus": true, // East US
|
||||
"eastus2": true, // East US 2
|
||||
"francecentral": true, // France Central
|
||||
"francesouth": true, // France South
|
||||
"germanynorth": true, // Germany North
|
||||
"germanywestcentral": true, // Germany West Central
|
||||
"indonesiacentral": true, // Indonesia Central
|
||||
"israelcentral": true, // Israel Central
|
||||
"italynorth": true, // Italy North
|
||||
"japaneast": true, // Japan East
|
||||
"japanwest": true, // Japan West
|
||||
"koreacentral": true, // Korea Central
|
||||
"koreasouth": true, // Korea South
|
||||
"malaysiawest": true, // Malaysia West
|
||||
"mexicocentral": true, // Mexico Central
|
||||
"newzealandnorth": true, // New Zealand North
|
||||
"northcentralus": true, // North Central US
|
||||
"northeurope": true, // North Europe
|
||||
"norwayeast": true, // Norway East
|
||||
"norwaywest": true, // Norway West
|
||||
"polandcentral": true, // Poland Central
|
||||
"qatarcentral": true, // Qatar Central
|
||||
"southafricanorth": true, // South Africa North
|
||||
"southafricawest": true, // South Africa West
|
||||
"southcentralus": true, // South Central US
|
||||
"southindia": true, // South India
|
||||
"southeastasia": true, // Southeast Asia
|
||||
"spaincentral": true, // Spain Central
|
||||
"swedencentral": true, // Sweden Central
|
||||
"switzerlandnorth": true, // Switzerland North
|
||||
"switzerlandwest": true, // Switzerland West
|
||||
"uaecentral": true, // UAE Central
|
||||
"uaenorth": true, // UAE North
|
||||
"uksouth": true, // UK South
|
||||
"ukwest": true, // UK West
|
||||
"westcentralus": true, // West Central US
|
||||
"westeurope": true, // West Europe
|
||||
"westindia": true, // West India
|
||||
"westus": true, // West US
|
||||
"westus2": true, // West US 2
|
||||
"westus3": true, // West US 3
|
||||
}
|
||||
263
pkg/types/cloudintegrationtypes/servicedefinitions.go
Normal file
@@ -0,0 +1,263 @@
|
||||
package cloudintegrationtypes
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
var S3Sync = valuer.NewString("s3sync")
|
||||
|
||||
// Generic interface for cloud service definition.
|
||||
// This is implemented by AWSDefinition and AzureDefinition, which represent service definitions for AWS and Azure respectively.
|
||||
// Generics work well so far because service definitions share similar logic.
// We don't want to overdo generics either; if the service definitions diverge functionally in the future, consider dropping the generics.
|
||||
type Definition interface {
|
||||
GetId() string
|
||||
Validate() error
|
||||
PopulateDashboardURLs(cloudProvider CloudProviderType, svcId string)
|
||||
GetIngestionStatusCheck() *IngestionStatusCheck
|
||||
GetAssets() Assets
|
||||
}
|
||||
|
||||
// AWSDefinition represents AWS Service definition, which includes collection strategy, dashboards and meta info for integration
|
||||
type AWSDefinition = ServiceDefinition[AWSCollectionStrategy]
|
||||
|
||||
// AzureDefinition represents Azure Service definition, which includes collection strategy, dashboards and meta info for integration
|
||||
type AzureDefinition = ServiceDefinition[AzureCollectionStrategy]
|
||||
|
||||
// Making AWSDefinition and AzureDefinition satisfy Definition interface, so that they can be used in a generic way
|
||||
var (
|
||||
_ Definition = &AWSDefinition{}
|
||||
_ Definition = &AzureDefinition{}
|
||||
)
|
||||
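A short illustrative sketch of why these interface assertions matter: provider-agnostic code can work against Definition without caring whether it holds an AWS or an Azure definition (the helper below is hypothetical, not part of this change):

func collectDashboards(defs []cloudintegrationtypes.Definition) ([]cloudintegrationtypes.Dashboard, error) {
	var dashboards []cloudintegrationtypes.Dashboard
	for _, def := range defs {
		if err := def.Validate(); err != nil {
			return nil, err
		}
		dashboards = append(dashboards, def.GetAssets().Dashboards...)
	}
	return dashboards, nil
}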
|
||||
// ServiceDefinition is the generic struct for a cloud service definition, regardless of the cloud provider.
// This struct must satisfy the Definition interface.
// StrategyT is either AWSCollectionStrategy or AzureCollectionStrategy, depending on the cloud provider.
|
||||
type ServiceDefinition[StrategyT any] struct {
|
||||
DefinitionMetadata
|
||||
Overview string `json:"overview"` // markdown
|
||||
Assets Assets `json:"assets"`
|
||||
SupportedSignals SupportedSignals `json:"supported_signals"`
|
||||
DataCollected DataCollected `json:"data_collected"`
|
||||
IngestionStatusCheck *IngestionStatusCheck `json:"ingestion_status_check,omitempty"`
|
||||
Strategy *StrategyT `json:"telemetry_collection_strategy"`
|
||||
}
|
||||
|
||||
// The following methods are largely self-explanatory; they satisfy the Definition interface and provide utility functions for service definitions.
|
||||
func (def *ServiceDefinition[StrategyT]) GetId() string {
|
||||
return def.Id
|
||||
}
|
||||
|
||||
func (def *ServiceDefinition[StrategyT]) Validate() error {
|
||||
seenDashboardIds := map[string]interface{}{}
|
||||
|
||||
if def.Strategy == nil {
|
||||
return errors.NewInternalf(errors.CodeInternal, "telemetry_collection_strategy is required")
|
||||
}
|
||||
|
||||
for _, dd := range def.Assets.Dashboards {
|
||||
if _, seen := seenDashboardIds[dd.Id]; seen {
|
||||
return errors.NewInternalf(errors.CodeInternal, "multiple dashboards found with id %s", dd.Id)
|
||||
}
|
||||
seenDashboardIds[dd.Id] = nil
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
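A hedged sketch of the two failure modes checked above, a missing collection strategy and duplicate dashboard ids; the ids used are hypothetical:

func exampleDefinitionValidate() {
	def := &cloudintegrationtypes.AWSDefinition{
		DefinitionMetadata: cloudintegrationtypes.DefinitionMetadata{Id: "s3sync"},
		Assets: cloudintegrationtypes.Assets{
			Dashboards: []cloudintegrationtypes.Dashboard{{Id: "overview"}, {Id: "overview"}},
		},
	}
	_ = def.Validate() // error: telemetry_collection_strategy is required

	def.Strategy = &cloudintegrationtypes.AWSCollectionStrategy{}
	_ = def.Validate() // error: multiple dashboards found with id overview
}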
|
||||
func (def *ServiceDefinition[StrategyT]) PopulateDashboardURLs(cloudProvider CloudProviderType, svcId string) {
|
||||
for i := range def.Assets.Dashboards {
|
||||
dashboardId := def.Assets.Dashboards[i].Id
|
||||
url := "/dashboard/" + GetCloudIntegrationDashboardID(cloudProvider, svcId, dashboardId)
|
||||
def.Assets.Dashboards[i].Url = url
|
||||
}
|
||||
}
|
||||
|
||||
func (def *ServiceDefinition[StrategyT]) GetIngestionStatusCheck() *IngestionStatusCheck {
|
||||
return def.IngestionStatusCheck
|
||||
}
|
||||
|
||||
func (def *ServiceDefinition[StrategyT]) GetAssets() Assets {
|
||||
return def.Assets
|
||||
}
|
||||
|
||||
// DefinitionMetadata represents service definition metadata. This is useful for showing service overview
|
||||
type DefinitionMetadata struct {
|
||||
Id string `json:"id"`
|
||||
Title string `json:"title"`
|
||||
Icon string `json:"icon"`
|
||||
}
|
||||
|
||||
// IngestionStatusCheckCategory represents a category of ingestion status check. Applies for both metrics and logs.
|
||||
// A category can be "Overview" of metrics or "Enhanced" Metrics for AWS, and "Transaction" or "Capacity" metrics for Azure.
|
||||
// Each category can have multiple checks (AND logic); if all checks pass,
// we can be sure that data is being ingested for that category of the signal.
|
||||
type IngestionStatusCheckCategory struct {
|
||||
Category string `json:"category"`
|
||||
DisplayName string `json:"display_name"`
|
||||
Checks []*IngestionStatusCheckAttribute `json:"checks"`
|
||||
}
|
||||
|
||||
// IngestionStatusCheckAttribute represents a check or condition for ingestion status.
|
||||
// Key can be metric name or part of log message
|
||||
type IngestionStatusCheckAttribute struct {
|
||||
Key string `json:"key"` // OPTIONAL search key (metric name or log message)
|
||||
Attributes []*IngestionStatusCheckAttributeFilter `json:"attributes"`
|
||||
}
|
||||
|
||||
// IngestionStatusCheck represents combined checks for metrics and logs for a service
|
||||
type IngestionStatusCheck struct {
|
||||
Metrics []*IngestionStatusCheckCategory `json:"metrics"`
|
||||
Logs []*IngestionStatusCheckCategory `json:"logs"`
|
||||
}
|
||||
|
||||
// IngestionStatusCheckAttributeFilter represents filter for a check, which can be used to filter specific log messages or metrics with specific attributes.
|
||||
// For example, we can use it to filter logs with specific log level or metrics with specific dimensions.
|
||||
type IngestionStatusCheckAttributeFilter struct {
|
||||
Name string `json:"name"`
|
||||
Operator string `json:"operator"`
|
||||
Value string `json:"value"` // OPTIONAL
|
||||
}
|
||||
|
||||
// Assets represents the collection of dashboards
|
||||
type Assets struct {
|
||||
Dashboards []Dashboard `json:"dashboards"`
|
||||
}
|
||||
|
||||
// SupportedSignals for cloud provider's service
|
||||
type SupportedSignals struct {
|
||||
Logs bool `json:"logs"`
|
||||
Metrics bool `json:"metrics"`
|
||||
}
|
||||
|
||||
// DataCollected is a curated static list of metrics and logs; this is shown as part of the service overview.
|
||||
type DataCollected struct {
|
||||
Logs []CollectedLogAttribute `json:"logs"`
|
||||
Metrics []CollectedMetric `json:"metrics"`
|
||||
}
|
||||
|
||||
// CollectedLogAttribute represents a log attribute that is present in all log entries for a service,
|
||||
// this is shown as part of service overview
|
||||
type CollectedLogAttribute struct {
|
||||
Name string `json:"name"`
|
||||
Path string `json:"path"`
|
||||
Type string `json:"type"`
|
||||
}
|
||||
|
||||
// CollectedMetric represents a metric that is collected for a service, this is shown as part of service overview
|
||||
type CollectedMetric struct {
|
||||
Name string `json:"name"`
|
||||
Type string `json:"type"`
|
||||
Unit string `json:"unit"`
|
||||
Description string `json:"description"`
|
||||
}
|
||||
|
||||
// AWSCollectionStrategy represents signal collection strategy for AWS services.
|
||||
// this is AWS specific.
|
||||
type AWSCollectionStrategy struct {
|
||||
Metrics *AWSMetricsStrategy `json:"aws_metrics,omitempty"`
|
||||
Logs *AWSLogsStrategy `json:"aws_logs,omitempty"`
|
||||
S3Buckets map[string][]string `json:"s3_buckets,omitempty"` // Only available in S3 Sync Service Type in AWS
|
||||
}
|
||||
|
||||
// AzureCollectionStrategy represents signal collection strategy for Azure services.
|
||||
// this is Azure specific.
|
||||
type AzureCollectionStrategy struct {
|
||||
Metrics []*AzureMetricsStrategy `json:"azure_metrics,omitempty"`
|
||||
Logs []*AzureLogsStrategy `json:"azure_logs,omitempty"`
|
||||
}
|
||||
|
||||
// AWSMetricsStrategy represents metrics collection strategy for AWS services.
|
||||
// this is AWS specific.
|
||||
type AWSMetricsStrategy struct {
|
||||
// to be used as https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-metricstream.html#cfn-cloudwatch-metricstream-includefilters
|
||||
StreamFilters []struct {
|
||||
// json tags here are in the shape expected by AWS API as detailed at
|
||||
// https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-metricstream-metricstreamfilter.html
|
||||
Namespace string `json:"Namespace"`
|
||||
MetricNames []string `json:"MetricNames,omitempty"`
|
||||
} `json:"cloudwatch_metric_stream_filters"`
|
||||
}
|
||||
|
||||
// AWSLogsStrategy represents logs collection strategy for AWS services.
|
||||
// this is AWS specific.
|
||||
type AWSLogsStrategy struct {
|
||||
Subscriptions []struct {
|
||||
// subscribe to all logs groups with specified prefix.
|
||||
// eg: `/aws/rds/`
|
||||
LogGroupNamePrefix string `json:"log_group_name_prefix"`
|
||||
|
||||
// https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html
|
||||
// "" implies no filtering is required.
|
||||
FilterPattern string `json:"filter_pattern"`
|
||||
} `json:"cloudwatch_logs_subscriptions"`
|
||||
}
|
||||
|
||||
// AzureMetricsStrategy represents metrics collection strategy for Azure services.
|
||||
// this is Azure specific.
|
||||
type AzureMetricsStrategy struct {
|
||||
CategoryType string `json:"category_type"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
// AzureLogsStrategy represents logs collection strategy for Azure services.
|
||||
// this is Azure specific. Even though this is similar to AzureMetricsStrategy, keeping it separate for future flexibility and clarity.
|
||||
type AzureLogsStrategy struct {
|
||||
CategoryType string `json:"category_type"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
// Dashboard represents a dashboard definition for cloud integration.
|
||||
type Dashboard struct {
|
||||
Id string `json:"id"`
|
||||
Url string `json:"url"`
|
||||
Title string `json:"title"`
|
||||
Description string `json:"description"`
|
||||
Image string `json:"image"`
|
||||
Definition *dashboardtypes.StorableDashboardData `json:"definition,omitempty"`
|
||||
}
|
||||
|
||||
// UTILS
|
||||
|
||||
// GetCloudIntegrationDashboardID returns the dashboard id for a cloud integration, given the cloud provider, service id, and dashboard id.
|
||||
// This is used to generate unique dashboard ids for cloud integration, and also to parse the dashboard id to get the cloud provider and service id when needed.
|
||||
func GetCloudIntegrationDashboardID(cloudProvider CloudProviderType, svcId, dashboardId string) string {
|
||||
return fmt.Sprintf("cloud-integration--%s--%s--%s", cloudProvider, svcId, dashboardId)
|
||||
}
|
||||
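A small illustrative sketch tying this helper back to GetCloudProviderFromDashboardID and IsCloudIntegrationDashboardUuid from cloudintegration.go; the service and dashboard ids are hypothetical:

func exampleDashboardIDRoundTrip() error {
	id := cloudintegrationtypes.GetCloudIntegrationDashboardID(cloudintegrationtypes.CloudProviderTypeAWS, "s3sync", "overview")
	// id has the form "cloud-integration--aws--s3sync--overview"

	if !cloudintegrationtypes.IsCloudIntegrationDashboardUuid(id) {
		return nil
	}
	provider, err := cloudintegrationtypes.GetCloudProviderFromDashboardID(id)
	if err != nil {
		return err
	}
	_ = provider // CloudProviderTypeAWS
	return nil
}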
|
||||
// GetDashboardsFromAssets returns the list of dashboards for the cloud provider service from definition
|
||||
func GetDashboardsFromAssets(
|
||||
svcId string,
|
||||
orgID valuer.UUID,
|
||||
cloudProvider CloudProviderType,
|
||||
createdAt *time.Time,
|
||||
assets Assets,
|
||||
) []*dashboardtypes.Dashboard {
|
||||
dashboards := make([]*dashboardtypes.Dashboard, 0)
|
||||
|
||||
for _, d := range assets.Dashboards {
|
||||
author := fmt.Sprintf("%s-integration", cloudProvider)
|
||||
dashboards = append(dashboards, &dashboardtypes.Dashboard{
|
||||
ID: GetCloudIntegrationDashboardID(cloudProvider, svcId, d.Id),
|
||||
Locked: true,
|
||||
OrgID: orgID,
|
||||
Data: *d.Definition,
|
||||
TimeAuditable: types.TimeAuditable{
|
||||
CreatedAt: *createdAt,
|
||||
UpdatedAt: *createdAt,
|
||||
},
|
||||
UserAuditable: types.UserAuditable{
|
||||
CreatedBy: author,
|
||||
UpdatedBy: author,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
return dashboards
|
||||
}
|
||||
42
pkg/types/cloudintegrationtypes/store.go
Normal file
@@ -0,0 +1,42 @@
|
||||
package cloudintegrationtypes
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
)
|
||||
|
||||
type CloudIntegrationAccountStore interface {
|
||||
ListConnected(ctx context.Context, orgId string, provider string) ([]CloudIntegration, error)
|
||||
|
||||
Get(ctx context.Context, orgId string, provider string, id string) (*CloudIntegration, error)
|
||||
|
||||
GetConnectedCloudAccount(ctx context.Context, orgId, provider string, accountID string) (*CloudIntegration, error)
|
||||
|
||||
// Insert an account or update it by (cloudProvider, id)
|
||||
// for specified non-empty fields
|
||||
Upsert(
|
||||
ctx context.Context,
|
||||
orgId string,
|
||||
provider string,
|
||||
id *string,
|
||||
config []byte,
|
||||
accountId *string,
|
||||
agentReport *AgentReport,
|
||||
removedAt *time.Time,
|
||||
) (*CloudIntegration, error)
|
||||
}
|
||||
|
||||
type CloudIntegrationServiceStore interface {
|
||||
Get(ctx context.Context, orgID, cloudAccountId, serviceType string) ([]byte, error)
|
||||
|
||||
Upsert(
|
||||
ctx context.Context,
|
||||
orgID,
|
||||
cloudProvider,
|
||||
cloudAccountId,
|
||||
serviceId string,
|
||||
config []byte,
|
||||
) ([]byte, error)
|
||||
|
||||
GetAllForAccount(ctx context.Context, orgID, cloudAccountId string) (map[string][]byte, error)
|
||||
}
|
||||
@@ -18,6 +18,7 @@ var (
|
||||
var (
|
||||
TemplateNameInvitationEmail = TemplateName{valuer.NewString("invitation")}
|
||||
TemplateNameResetPassword = TemplateName{valuer.NewString("reset_password")}
|
||||
TemplateNameAPIKeyEvent = TemplateName{valuer.NewString("api_key_event")}
|
||||
)
|
||||
|
||||
type TemplateName struct{ valuer.String }
|
||||
@@ -28,6 +29,8 @@ func NewTemplateName(name string) (TemplateName, error) {
|
||||
return TemplateNameInvitationEmail, nil
|
||||
case TemplateNameResetPassword.StringValue():
|
||||
return TemplateNameResetPassword, nil
|
||||
case TemplateNameAPIKeyEvent.StringValue():
|
||||
return TemplateNameAPIKeyEvent, nil
|
||||
default:
|
||||
return TemplateName{}, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid template name: %s", name)
|
||||
}
|
||||
|
||||
@@ -1,120 +0,0 @@
|
||||
package exporttypes
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
// ExportRawDataQueryParams represents the query parameters for the export raw data endpoint
|
||||
type ExportRawDataQueryParams struct {
|
||||
ExportRawDataFormatQueryParam
|
||||
|
||||
// Signal specifies the type of data to export: "logs" or "traces"
|
||||
Signal telemetrytypes.Signal `query:"signal" enum:"logs,traces" required:"true"`
|
||||
|
||||
// Source specifies the type of data to export: "logs" or "traces"
|
||||
// Deprecated: Use Signal instead.
|
||||
Source string `query:"source" deprecated:"true"`
|
||||
|
||||
// Start is the start time for the query (Unix timestamp in nanoseconds)
|
||||
Start uint64 `query:"start"`
|
||||
|
||||
// End is the end time for the query (Unix timestamp in nanoseconds)
|
||||
End uint64 `query:"end"`
|
||||
|
||||
// Limit specifies the maximum number of rows to export
|
||||
Limit int `query:"limit,default=10000" default:"10000" minimum:"1" maximum:"50000"`
|
||||
|
||||
// Filter is a filter expression to apply to the query
|
||||
FilterString string `query:"filter" deprecated:"true"`
|
||||
Filter qbtypes.Filter `query:"filterExpression"`
|
||||
|
||||
// Columns specifies the columns to include in the export
|
||||
// Format: ["context.field:type", "context.field", "field"]
|
||||
Columns []string `query:"columns" deprecated:"true"`
|
||||
|
||||
// SelectFields specifies the columns to include in the export
|
||||
SelectFields []telemetrytypes.TelemetryFieldKey `query:"selectFields"`
|
||||
|
||||
// OrderBy specifies the sorting order
|
||||
// Format: "column:direction" or "context.field:type:direction"
|
||||
// Direction can be "asc" or "desc"
|
||||
// ** Deprecated **
|
||||
OrderBy string `query:"order_by" deprecated:"true"`
|
||||
|
||||
// order by keys and directions
|
||||
Order []qbtypes.OrderBy `query:"order"`
|
||||
}
|
||||
|
||||
type ExportRawDataFormatQueryParam struct {
|
||||
// Format specifies the output format: "csv" or "jsonl"
|
||||
Format string `query:"format,default=csv" default:"csv" enum:"csv,jsonl"`
|
||||
}
|
||||
|
||||
func (p *ExportRawDataQueryParams) Normalize() {
|
||||
if len(p.Order) == 0 && len(p.OrderBy) > 0 {
|
||||
p.Order = parseExportQueryOrderBy(p.OrderBy)
|
||||
}
|
||||
|
||||
if len(p.SelectFields) == 0 && len(p.Columns) != 0 {
|
||||
p.SelectFields = parseExportQueryColumns(p.Columns)
|
||||
|
||||
}
|
||||
|
||||
if len(p.Filter.Expression) == 0 && len(p.FilterString) > 0 {
|
||||
p.Filter = qbtypes.Filter{Expression: p.FilterString}
|
||||
}
|
||||
|
||||
if p.Signal == telemetrytypes.SignalUnspecified && p.Source != "" {
|
||||
p.Signal = telemetrytypes.Signal{String: valuer.NewString(p.Source)}
|
||||
}
|
||||
}
|
||||
|
||||
func (p *ExportRawDataQueryParams) Validate() error {
|
||||
|
||||
if p.Signal != telemetrytypes.SignalLogs && p.Signal != telemetrytypes.SignalTraces {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid signal %s", p.Signal).WithAdditional("Allowed values: [logs, traces]")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// parseExportQueryColumns converts bound column strings to TelemetryFieldKey structs.
|
||||
// Each column should be in the format "context.field:type" or "context.field" or "field"
|
||||
func parseExportQueryColumns(columnParams []string) []telemetrytypes.TelemetryFieldKey {
|
||||
columns := make([]telemetrytypes.TelemetryFieldKey, 0, len(columnParams))
|
||||
for _, columnStr := range columnParams {
|
||||
columnStr = strings.TrimSpace(columnStr)
|
||||
if columnStr == "" {
|
||||
continue
|
||||
}
|
||||
columns = append(columns, telemetrytypes.GetFieldKeyFromKeyText(columnStr))
|
||||
}
|
||||
return columns
|
||||
}
|
||||
|
||||
// parseExportQueryOrderBy converts a bound order_by string to an OrderBy slice.
|
||||
// The string should be in the format "column:direction" and is assumed already validated.
|
||||
func parseExportQueryOrderBy(orderByParam string) []qbtypes.OrderBy {
|
||||
orderByParam = strings.TrimSpace(orderByParam)
|
||||
if orderByParam == "" {
|
||||
return []qbtypes.OrderBy{}
|
||||
}
|
||||
|
||||
parts := strings.Split(orderByParam, ":")
|
||||
column := strings.Join(parts[:len(parts)-1], ":")
|
||||
direction := parts[len(parts)-1]
|
||||
|
||||
return []qbtypes.OrderBy{
|
||||
{
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.GetFieldKeyFromKeyText(column),
|
||||
},
|
||||
Direction: qbtypes.OrderDirectionMap[direction],
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -1,174 +0,0 @@
|
||||
package exporttypes
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestParseExportQueryColumns(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input []string
|
||||
expectedColumns []telemetrytypes.TelemetryFieldKey
|
||||
}{
|
||||
{
|
||||
name: "empty input",
|
||||
input: []string{},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{},
|
||||
},
|
||||
{
|
||||
name: "single column",
|
||||
input: []string{"timestamp"},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "multiple columns",
|
||||
input: []string{"timestamp", "message", "level"},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
{Name: "message"},
|
||||
{Name: "level"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "empty entry is skipped",
|
||||
input: []string{"timestamp", "", "level"},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
{Name: "level"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "whitespace-only entry is skipped",
|
||||
input: []string{"timestamp", " ", "level"},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
{Name: "level"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "column with context and type",
|
||||
input: []string{"attribute.user:string"},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "user", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "column with context, dot-notation name",
|
||||
input: []string{"attribute.user.string"},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "user.string", FieldContext: telemetrytypes.FieldContextAttribute},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
columns := parseExportQueryColumns(tt.input)
|
||||
assert.Equal(t, len(tt.expectedColumns), len(columns))
|
||||
for i, expected := range tt.expectedColumns {
|
||||
assert.Equal(t, expected, columns[i])
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseExportQueryOrderBy(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
expectedOrder []qbtypes.OrderBy
|
||||
}{
|
||||
{
|
||||
name: "empty string returns empty slice",
|
||||
input: "",
|
||||
expectedOrder: []qbtypes.OrderBy{},
|
||||
},
|
||||
{
|
||||
name: "simple column asc",
|
||||
input: "timestamp:asc",
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "timestamp"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "simple column desc",
|
||||
input: "timestamp:desc",
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "timestamp"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "column with context and type qualifier",
|
||||
input: "attribute.user:string:desc",
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "user",
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "column with context, dot-notation name",
|
||||
input: "attribute.user.string:desc",
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "user.string",
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "resource with context and type",
|
||||
input: "resource.service.name:string:asc",
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
order := parseExportQueryOrderBy(tt.input)
|
||||
assert.Equal(t, len(tt.expectedOrder), len(order))
|
||||
for i, expected := range tt.expectedOrder {
|
||||
assert.Equal(t, expected, order[i])
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
package types
|
||||
package integrationtypes
|
||||
|
||||
import (
|
||||
"database/sql/driver"
|
||||
@@ -6,6 +6,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
@@ -26,17 +27,17 @@ var AllIntegrationUserEmails = []IntegrationUserEmail{
|
||||
type InstalledIntegration struct {
|
||||
bun.BaseModel `bun:"table:installed_integration"`
|
||||
|
||||
Identifiable
|
||||
types.Identifiable
|
||||
Type string `json:"type" bun:"type,type:text,unique:org_id_type"`
|
||||
Config InstalledIntegrationConfig `json:"config" bun:"config,type:text"`
|
||||
InstalledAt time.Time `json:"installed_at" bun:"installed_at,default:current_timestamp"`
|
||||
OrgID string `json:"org_id" bun:"org_id,type:text,unique:org_id_type,references:organizations(id),on_delete:cascade"`
|
||||
}
|
||||
|
||||
type InstalledIntegrationConfig map[string]interface{}
|
||||
type InstalledIntegrationConfig map[string]any
|
||||
|
||||
// For serializing from db
|
||||
func (c *InstalledIntegrationConfig) Scan(src interface{}) error {
|
||||
func (c *InstalledIntegrationConfig) Scan(src any) error {
|
||||
var data []byte
|
||||
switch v := src.(type) {
|
||||
case []byte:
|
||||
@@ -67,8 +68,8 @@ func (c *InstalledIntegrationConfig) Value() (driver.Value, error) {
|
||||
type CloudIntegration struct {
|
||||
bun.BaseModel `bun:"table:cloud_integration"`
|
||||
|
||||
Identifiable
|
||||
TimeAuditable
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
Provider string `json:"provider" bun:"provider,type:text,unique:provider_id"`
|
||||
Config *AccountConfig `json:"config" bun:"config,type:text"`
|
||||
AccountID *string `json:"account_id" bun:"account_id,type:text"`
|
||||
@@ -194,8 +195,8 @@ func (r *AgentReport) Value() (driver.Value, error) {
|
||||
type CloudIntegrationService struct {
|
||||
bun.BaseModel `bun:"table:cloud_integration_service,alias:cis"`
|
||||
|
||||
Identifiable
|
||||
TimeAuditable
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
Type string `bun:"type,type:text,notnull,unique:cloud_integration_id_type"`
|
||||
Config CloudServiceConfig `bun:"config,type:text"`
|
||||
CloudIntegrationID string `bun:"cloud_integration_id,type:text,notnull,unique:cloud_integration_id_type,references:cloud_integrations(id),on_delete:cascade"`
|
||||
@@ -3,6 +3,7 @@ package metrictypes
|
||||
import (
|
||||
"database/sql/driver"
|
||||
"fmt"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
@@ -135,6 +136,10 @@ func (t *Type) Scan(src interface{}) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (t Type) IsPercentileSpaceAggregationAllowed() bool {
|
||||
return t == HistogramType || t == ExpHistogramType || t == SummaryType
|
||||
}
|
||||
|
||||
var (
|
||||
GaugeType = Type{valuer.NewString("gauge")}
|
||||
SumType = Type{valuer.NewString("sum")}
|
||||
@@ -185,6 +190,10 @@ func (TimeAggregation) Enum() []any {
|
||||
}
|
||||
}
|
||||
|
||||
func (t TimeAggregation) IsValid() bool {
|
||||
return slices.ContainsFunc(t.Enum(), func(v any) bool { return v == t })
|
||||
}
|
||||
|
||||
type SpaceAggregation struct {
|
||||
valuer.String
|
||||
}
|
||||
@@ -218,6 +227,10 @@ func (SpaceAggregation) Enum() []any {
|
||||
}
|
||||
}
|
||||
|
||||
func (s SpaceAggregation) IsValid() bool {
|
||||
return slices.ContainsFunc(s.Enum(), func(v any) bool { return v == s })
|
||||
}
|
||||
|
||||
func (s SpaceAggregation) IsPercentile() bool {
|
||||
return s == SpaceAggregationPercentile50 ||
|
||||
s == SpaceAggregationPercentile75 ||
|
||||
|
||||
@@ -393,59 +393,6 @@ func (r *QueryRangeRequest) HasOrderSpecified() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
// UseDefaultOrderBy applies a default order unless query has an explicit order provided.
|
||||
func (r *QueryRangeRequest) UseDefaultOrderBy() {
|
||||
queries := r.CompositeQuery.Queries
|
||||
for idx := range queries {
|
||||
switch queries[idx].Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation],
|
||||
QueryBuilderTraceOperator:
|
||||
if len(queries[idx].GetOrder()) == 0 {
|
||||
queries[idx].SetOrder(
|
||||
[]OrderBy{
|
||||
{
|
||||
Key: OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "timestamp",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeNumber,
|
||||
},
|
||||
},
|
||||
Direction: OrderDirectionDesc,
|
||||
},
|
||||
},
|
||||
)
|
||||
}
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
if len(queries[idx].GetOrder()) == 0 {
|
||||
queries[idx].SetOrder(
|
||||
[]OrderBy{
|
||||
{
|
||||
Key: OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "timestamp",
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
FieldContext: telemetrytypes.FieldContextLog,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeNumber},
|
||||
},
|
||||
Direction: OrderDirectionDesc,
|
||||
},
|
||||
{
|
||||
Key: OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "id",
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
FieldContext: telemetrytypes.FieldContextLog,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString},
|
||||
},
|
||||
Direction: OrderDirectionDesc,
|
||||
},
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (r *QueryRangeRequest) FuncsForQuery(name string) []Function {
|
||||
funcs := []Function{}
|
||||
for _, query := range r.CompositeQuery.Queries {
|
||||
@@ -490,16 +437,6 @@ func (r *QueryRangeRequest) IsAnomalyRequest() (*QueryBuilderQuery[MetricAggrega
|
||||
return &q, hasAnomaly
|
||||
}
|
||||
|
||||
func (r *QueryRangeRequest) TraceOperatorQueryIndex() int {
|
||||
for idx, query := range r.CompositeQuery.Queries {
|
||||
switch query.Spec.(type) {
|
||||
case TraceOperatorType:
|
||||
return idx
|
||||
}
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
// We do not support fill gaps for these queries. Maybe support in future?
|
||||
func (r *QueryRangeRequest) SkipFillGaps(name string) bool {
|
||||
for _, query := range r.CompositeQuery.Queries {
|
||||
|
||||
@@ -1,379 +0,0 @@
|
||||
package querybuildertypesv5
|
||||
|
||||
import "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
|
||||
// GetExpression returns the expression string.
|
||||
func (q *QueryEnvelope) GetExpression() string {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Expression
|
||||
case QueryBuilderFormula:
|
||||
return spec.Expression
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// GetReturnSpansFrom returns the return-spans-from value.
|
||||
func (q *QueryEnvelope) GetReturnSpansFrom() string {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.ReturnSpansFrom
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// GetSignal returns the signal.
|
||||
func (q *QueryEnvelope) GetSignal() telemetrytypes.Signal {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Signal
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Signal
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Signal
|
||||
}
|
||||
return telemetrytypes.SignalUnspecified
|
||||
}
|
||||
|
||||
// GetSource returns the source.
|
||||
func (q *QueryEnvelope) GetSource() telemetrytypes.Source {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Source
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Source
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Source
|
||||
}
|
||||
return telemetrytypes.SourceUnspecified
|
||||
}
|
||||
|
||||
// GetQuery returns the raw query string.
|
||||
func (q *QueryEnvelope) GetQuery() string {
|
||||
switch spec := q.Spec.(type) {
|
||||
case PromQuery:
|
||||
return spec.Query
|
||||
case ClickHouseQuery:
|
||||
return spec.Query
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// GetStats returns the PromQL stats flag.
|
||||
func (q *QueryEnvelope) GetStats() bool {
|
||||
switch spec := q.Spec.(type) {
|
||||
case PromQuery:
|
||||
return spec.Stats
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// GetLeft returns the left query reference of a join.
|
||||
func (q *QueryEnvelope) GetLeft() QueryRef {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderJoin:
|
||||
return spec.Left
|
||||
}
|
||||
return QueryRef{}
|
||||
}
|
||||
|
||||
// GetRight returns the right query reference of a join.
|
||||
func (q *QueryEnvelope) GetRight() QueryRef {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderJoin:
|
||||
return spec.Right
|
||||
}
|
||||
return QueryRef{}
|
||||
}
|
||||
|
||||
// GetJoinType returns the join type.
|
||||
func (q *QueryEnvelope) GetJoinType() JoinType {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderJoin:
|
||||
return spec.Type
|
||||
}
|
||||
return JoinType{}
|
||||
}
|
||||
|
||||
// GetOn returns the join ON condition.
|
||||
func (q *QueryEnvelope) GetOn() string {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderJoin:
|
||||
return spec.On
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// GetQueryName returns the name of the spec.
|
||||
func (q *QueryEnvelope) GetQueryName() string {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Name
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Name
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Name
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Name
|
||||
case QueryBuilderFormula:
|
||||
return spec.Name
|
||||
case QueryBuilderJoin:
|
||||
return spec.Name
|
||||
case PromQuery:
|
||||
return spec.Name
|
||||
case ClickHouseQuery:
|
||||
return spec.Name
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// IsDisabled returns whether the spec is disabled.
|
||||
func (q *QueryEnvelope) IsDisabled() bool {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Disabled
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Disabled
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Disabled
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Disabled
|
||||
case QueryBuilderFormula:
|
||||
return spec.Disabled
|
||||
case QueryBuilderJoin:
|
||||
return spec.Disabled
|
||||
case PromQuery:
|
||||
return spec.Disabled
|
||||
case ClickHouseQuery:
|
||||
return spec.Disabled
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// GetLimit returns the row limit.
|
||||
func (q *QueryEnvelope) GetLimit() int {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Limit
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Limit
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Limit
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Limit
|
||||
case QueryBuilderFormula:
|
||||
return spec.Limit
|
||||
case QueryBuilderJoin:
|
||||
return spec.Limit
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// GetOffset returns the row offset.
|
||||
func (q *QueryEnvelope) GetOffset() int {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Offset
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Offset
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Offset
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Offset
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// GetType returns the QueryType of the envelope.
|
||||
func (q *QueryEnvelope) GetType() QueryType {
|
||||
return q.Type
|
||||
}
|
||||
|
||||
// GetOrder returns the order-by clauses.
|
||||
func (q *QueryEnvelope) GetOrder() []OrderBy {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Order
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Order
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Order
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Order
|
||||
case QueryBuilderFormula:
|
||||
return spec.Order
|
||||
case QueryBuilderJoin:
|
||||
return spec.Order
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetGroupBy returns the group-by keys.
|
||||
func (q *QueryEnvelope) GetGroupBy() []GroupByKey {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.GroupBy
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.GroupBy
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.GroupBy
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.GroupBy
|
||||
case QueryBuilderJoin:
|
||||
return spec.GroupBy
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetFilter returns the filter.
|
||||
func (q *QueryEnvelope) GetFilter() *Filter {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Filter
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Filter
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Filter
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Filter
|
||||
case QueryBuilderJoin:
|
||||
return spec.Filter
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetHaving returns the having clause.
|
||||
func (q *QueryEnvelope) GetHaving() *Having {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Having
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Having
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Having
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Having
|
||||
case QueryBuilderFormula:
|
||||
return spec.Having
|
||||
case QueryBuilderJoin:
|
||||
return spec.Having
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetFunctions returns the post-processing functions.
|
||||
func (q *QueryEnvelope) GetFunctions() []Function {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Functions
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Functions
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Functions
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Functions
|
||||
case QueryBuilderFormula:
|
||||
return spec.Functions
|
||||
case QueryBuilderJoin:
|
||||
return spec.Functions
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetSelectFields returns the selected fields.
|
||||
func (q *QueryEnvelope) GetSelectFields() []telemetrytypes.TelemetryFieldKey {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.SelectFields
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.SelectFields
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.SelectFields
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.SelectFields
|
||||
case QueryBuilderJoin:
|
||||
return spec.SelectFields
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetLegend returns the legend label.
|
||||
func (q *QueryEnvelope) GetLegend() string {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Legend
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Legend
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Legend
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Legend
|
||||
case QueryBuilderFormula:
|
||||
return spec.Legend
|
||||
case PromQuery:
|
||||
return spec.Legend
|
||||
case ClickHouseQuery:
|
||||
return spec.Legend
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// GetCursor returns the pagination cursor.
|
||||
func (q *QueryEnvelope) GetCursor() string {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Cursor
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Cursor
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Cursor
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Cursor
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// GetStepInterval returns the step interval.
|
||||
func (q *QueryEnvelope) GetStepInterval() Step {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.StepInterval
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.StepInterval
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.StepInterval
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.StepInterval
|
||||
case PromQuery:
|
||||
return spec.Step
|
||||
}
|
||||
return Step{}
|
||||
}
|
||||
|
||||
// GetSecondaryAggregations returns the secondary aggregations.
|
||||
func (q *QueryEnvelope) GetSecondaryAggregations() []SecondaryAggregation {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.SecondaryAggregations
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.SecondaryAggregations
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.SecondaryAggregations
|
||||
case QueryBuilderJoin:
|
||||
return spec.SecondaryAggregations
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetLimitBy returns the limit-by configuration.
|
||||
func (q *QueryEnvelope) GetLimitBy() *LimitBy {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.LimitBy
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.LimitBy
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.LimitBy
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -1,452 +0,0 @@
|
||||
package querybuildertypesv5
|
||||
|
||||
import "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
|
||||
// SetExpression sets the expression string of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetExpression(expression string) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Expression = expression
|
||||
q.Spec = spec
|
||||
case QueryBuilderFormula:
|
||||
spec.Expression = expression
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetReturnSpansFrom sets the return-spans-from value, if applicable.
|
||||
func (q *QueryEnvelope) SetReturnSpansFrom(returnSpansFrom string) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.ReturnSpansFrom = returnSpansFrom
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetSignal sets the signal of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetSignal(signal telemetrytypes.Signal) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Signal = signal
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Signal = signal
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Signal = signal
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetSource sets the source of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetSource(source telemetrytypes.Source) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Source = source
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Source = source
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Source = source
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetQuery sets the raw query string of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetQuery(query string) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case PromQuery:
|
||||
spec.Query = query
|
||||
q.Spec = spec
|
||||
case ClickHouseQuery:
|
||||
spec.Query = query
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetStats sets the PromQL stats flag, if applicable.
|
||||
func (q *QueryEnvelope) SetStats(stats bool) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case PromQuery:
|
||||
spec.Stats = stats
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetLeft sets the left query reference of a join, if applicable.
|
||||
func (q *QueryEnvelope) SetLeft(left QueryRef) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderJoin:
|
||||
spec.Left = left
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetRight sets the right query reference of a join, if applicable.
|
||||
func (q *QueryEnvelope) SetRight(right QueryRef) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderJoin:
|
||||
spec.Right = right
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetJoinType sets the join type, if applicable.
|
||||
func (q *QueryEnvelope) SetJoinType(joinType JoinType) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderJoin:
|
||||
spec.Type = joinType
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetOn sets the join ON condition, if applicable.
|
||||
func (q *QueryEnvelope) SetOn(on string) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderJoin:
|
||||
spec.On = on
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetQueryName sets the name of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetQueryName(name string) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Name = name
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Name = name
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Name = name
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Name = name
|
||||
q.Spec = spec
|
||||
case QueryBuilderFormula:
|
||||
spec.Name = name
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.Name = name
|
||||
q.Spec = spec
|
||||
case PromQuery:
|
||||
spec.Name = name
|
||||
q.Spec = spec
|
||||
case ClickHouseQuery:
|
||||
spec.Name = name
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetDisabled sets the disabled flag of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetDisabled(disabled bool) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Disabled = disabled
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Disabled = disabled
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Disabled = disabled
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Disabled = disabled
|
||||
q.Spec = spec
|
||||
case QueryBuilderFormula:
|
||||
spec.Disabled = disabled
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.Disabled = disabled
|
||||
q.Spec = spec
|
||||
case PromQuery:
|
||||
spec.Disabled = disabled
|
||||
q.Spec = spec
|
||||
case ClickHouseQuery:
|
||||
spec.Disabled = disabled
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetLimit sets the row limit of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetLimit(limit int) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Limit = limit
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Limit = limit
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Limit = limit
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Limit = limit
|
||||
q.Spec = spec
|
||||
case QueryBuilderFormula:
|
||||
spec.Limit = limit
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.Limit = limit
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetOffset sets the row offset of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetOffset(offset int) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Offset = offset
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Offset = offset
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Offset = offset
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Offset = offset
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetType sets the QueryType of the envelope.
|
||||
func (q *QueryEnvelope) SetType(t QueryType) {
|
||||
q.Type = t
|
||||
}
|
||||
|
||||
// SetOrder sets the order-by clauses of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetOrder(order []OrderBy) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Order = order
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Order = order
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Order = order
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Order = order
|
||||
q.Spec = spec
|
||||
case QueryBuilderFormula:
|
||||
spec.Order = order
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.Order = order
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetGroupBy sets the group-by keys of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetGroupBy(groupBy []GroupByKey) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.GroupBy = groupBy
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.GroupBy = groupBy
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.GroupBy = groupBy
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.GroupBy = groupBy
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.GroupBy = groupBy
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetFilter sets the filter of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetFilter(filter *Filter) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Filter = filter
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Filter = filter
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Filter = filter
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Filter = filter
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.Filter = filter
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetHaving sets the having clause of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetHaving(having *Having) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Having = having
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Having = having
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Having = having
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Having = having
|
||||
q.Spec = spec
|
||||
case QueryBuilderFormula:
|
||||
spec.Having = having
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.Having = having
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetFunctions sets the post-processing functions of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetFunctions(functions []Function) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Functions = functions
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Functions = functions
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Functions = functions
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Functions = functions
|
||||
q.Spec = spec
|
||||
case QueryBuilderFormula:
|
||||
spec.Functions = functions
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.Functions = functions
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetSelectFields sets the selected fields of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetSelectFields(fields []telemetrytypes.TelemetryFieldKey) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.SelectFields = fields
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.SelectFields = fields
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.SelectFields = fields
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.SelectFields = fields
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.SelectFields = fields
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetLegend sets the legend label of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetLegend(legend string) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Legend = legend
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Legend = legend
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Legend = legend
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Legend = legend
|
||||
q.Spec = spec
|
||||
case QueryBuilderFormula:
|
||||
spec.Legend = legend
|
||||
q.Spec = spec
|
||||
case PromQuery:
|
||||
spec.Legend = legend
|
||||
q.Spec = spec
|
||||
case ClickHouseQuery:
|
||||
spec.Legend = legend
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetCursor sets the pagination cursor of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetCursor(cursor string) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Cursor = cursor
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Cursor = cursor
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Cursor = cursor
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Cursor = cursor
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetStepInterval sets the step interval of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetStepInterval(step Step) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.StepInterval = step
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.StepInterval = step
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.StepInterval = step
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.StepInterval = step
|
||||
q.Spec = spec
|
||||
case PromQuery:
|
||||
spec.Step = step
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetSecondaryAggregations sets the secondary aggregations of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetSecondaryAggregations(secondaryAggregations []SecondaryAggregation) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.SecondaryAggregations = secondaryAggregations
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.SecondaryAggregations = secondaryAggregations
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.SecondaryAggregations = secondaryAggregations
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.SecondaryAggregations = secondaryAggregations
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetLimitBy sets the limit-by configuration of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetLimitBy(limitBy *LimitBy) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.LimitBy = limitBy
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.LimitBy = limitBy
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.LimitBy = limitBy
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
@@ -10,9 +10,55 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
)
|
||||
|
||||
// queryName returns the name from any query envelope spec type.
|
||||
func (e QueryEnvelope) queryName() string {
|
||||
switch spec := e.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Name
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Name
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Name
|
||||
case QueryBuilderFormula:
|
||||
return spec.Name
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Name
|
||||
case QueryBuilderJoin:
|
||||
return spec.Name
|
||||
case PromQuery:
|
||||
return spec.Name
|
||||
case ClickHouseQuery:
|
||||
return spec.Name
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// isDisabled returns the disabled status from any query envelope spec type.
|
||||
func (e QueryEnvelope) isDisabled() bool {
|
||||
switch spec := e.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Disabled
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Disabled
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Disabled
|
||||
case QueryBuilderFormula:
|
||||
return spec.Disabled
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Disabled
|
||||
case QueryBuilderJoin:
|
||||
return spec.Disabled
|
||||
case PromQuery:
|
||||
return spec.Disabled
|
||||
case ClickHouseQuery:
|
||||
return spec.Disabled
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// getQueryIdentifier returns a friendly identifier for a query based on its type and name/content
|
||||
func getQueryIdentifier(envelope QueryEnvelope, index int) string {
|
||||
name := envelope.GetQueryName()
|
||||
name := envelope.queryName()
|
||||
|
||||
var typeLabel string
|
||||
switch envelope.Type {
|
||||
@@ -43,115 +89,50 @@ const (
|
||||
MaxQueryLimit = 10000
|
||||
)
|
||||
|
||||
// ValidationOption is a functional option for configuring validation behaviour.
|
||||
type ValidationOption func(*validationConfig)
|
||||
|
||||
type validationConfig struct {
|
||||
skipLimitValidation bool
|
||||
skipAggregationValidation bool
|
||||
skipHavingValidation bool
|
||||
skipAggregationOrderBy bool
|
||||
skipSelectFieldValidation bool
|
||||
skipGroupByValidation bool
|
||||
}
|
||||
|
||||
func applyValidationOptions(opts []ValidationOption) validationConfig {
|
||||
cfg := validationConfig{}
|
||||
for _, opt := range opts {
|
||||
opt(&cfg)
|
||||
}
|
||||
return cfg
|
||||
}
|
||||
|
||||
// SkipLimitValidation returns a ValidationOption that skips the limit range check.
|
||||
// Use this when the caller has already validated limits with different constraints.
|
||||
func WithSkipLimitValidation() ValidationOption {
|
||||
return func(cfg *validationConfig) {
|
||||
cfg.skipLimitValidation = true
|
||||
}
|
||||
}
|
||||
|
||||
// SkipAggregationValidation skips aggregation validation.
|
||||
// Used for raw/trace request types where aggregations are not required.
|
||||
func WithSkipAggregationValidation() ValidationOption {
|
||||
return func(cfg *validationConfig) {
|
||||
cfg.skipAggregationValidation = true
|
||||
}
|
||||
}
|
||||
|
||||
// SkipHavingValidation skips having-clause validation.
|
||||
// Used for raw/trace request types where having clauses do not apply.
|
||||
func WithSkipHavingValidation() ValidationOption {
|
||||
return func(cfg *validationConfig) {
|
||||
cfg.skipHavingValidation = true
|
||||
}
|
||||
}
|
||||
|
||||
// SkipAggregationOrderBy skips the aggregation-specific order-by key validation.
|
||||
// Used for raw/trace request types where order-by keys are not restricted to group-by or aggregation keys.
|
||||
func WithSkipAggregationOrderBy() ValidationOption {
|
||||
return func(cfg *validationConfig) {
|
||||
cfg.skipAggregationOrderBy = true
|
||||
}
|
||||
}
|
||||
|
||||
// SkipSelectFieldValidation skips select-field validation.
|
||||
// Used for aggregation request types where select fields do not apply.
|
||||
func WithSkipSelectFieldValidation() ValidationOption {
|
||||
return func(cfg *validationConfig) {
|
||||
cfg.skipSelectFieldValidation = true
|
||||
}
|
||||
}
|
||||
|
||||
// SkipGroupByValidation skips group-by validation.
|
||||
// Used for raw/trace request types where group-by does not apply.
|
||||
func WithSkipGroupByValidation() ValidationOption {
|
||||
return func(cfg *validationConfig) {
|
||||
cfg.skipGroupByValidation = true
|
||||
}
|
||||
}
|
||||
|
||||
// Validate performs preliminary validation on QueryBuilderQuery.
|
||||
func (q *QueryBuilderQuery[T]) Validate(opts ...ValidationOption) error {
|
||||
cfg := applyValidationOptions(opts)
|
||||
|
||||
// Validate performs preliminary validation on QueryBuilderQuery
|
||||
func (q *QueryBuilderQuery[T]) Validate(requestType RequestType) error {
|
||||
// Validate signal
|
||||
if err := q.validateSignal(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := q.validateAggregations(cfg); err != nil {
|
||||
if err := q.validateAggregations(requestType); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := q.validateGroupBy(cfg); err != nil {
|
||||
if err := q.validateGroupBy(requestType); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := q.validateLimitAndPagination(cfg); err != nil {
|
||||
// Validate limit and pagination
|
||||
if err := q.validateLimitAndPagination(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Validate functions
|
||||
if err := q.validateFunctions(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Validate secondary aggregations
|
||||
if err := q.validateSecondaryAggregations(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := q.validateOrderBy(cfg); err != nil {
|
||||
if err := q.validateOrderBy(requestType); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := q.validateSelectFields(cfg); err != nil {
|
||||
if err := q.validateSelectFields(requestType); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (q *QueryBuilderQuery[T]) validateSelectFields(cfg validationConfig) error {
|
||||
if cfg.skipSelectFieldValidation {
|
||||
func (q *QueryBuilderQuery[T]) validateSelectFields(requestType RequestType) error {
|
||||
// selectFields don't apply to aggregation queries, skip validation
|
||||
if requestType.IsAggregation() {
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -167,8 +148,9 @@ func (q *QueryBuilderQuery[T]) validateSelectFields(cfg validationConfig) error
|
||||
return nil
|
||||
}
|
||||
|
||||
func (q *QueryBuilderQuery[T]) validateGroupBy(cfg validationConfig) error {
|
||||
if cfg.skipGroupByValidation {
|
||||
func (q *QueryBuilderQuery[T]) validateGroupBy(requestType RequestType) error {
|
||||
// groupBy doesn't apply to non-aggregation queries, skip validation
|
||||
if !requestType.IsAggregation() {
|
||||
return nil
|
||||
}
|
||||
for idx, item := range q.GroupBy {
|
||||
@@ -201,8 +183,9 @@ func (q *QueryBuilderQuery[T]) validateSignal() error {
|
||||
}
|
||||
}
|
||||
|
||||
func (q *QueryBuilderQuery[T]) validateAggregations(cfg validationConfig) error {
|
||||
if cfg.skipAggregationValidation {
|
||||
func (q *QueryBuilderQuery[T]) validateAggregations(requestType RequestType) error {
|
||||
// aggregations don't apply to non-aggregation queries, skip validation
|
||||
if !requestType.IsAggregation() {
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -232,6 +215,13 @@ func (q *QueryBuilderQuery[T]) validateAggregations(cfg validationConfig) error
|
||||
aggId,
|
||||
)
|
||||
}
|
||||
if !v.SpaceAggregation.IsValid() {
|
||||
return errors.Newf(
|
||||
errors.TypeInvalidInput,
|
||||
errors.CodeInvalidInput,
|
||||
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`]",
|
||||
)
|
||||
}
|
||||
case TraceAggregation:
|
||||
if v.Expression == "" {
|
||||
aggId := fmt.Sprintf("aggregation #%d", i+1)
|
||||
@@ -282,25 +272,24 @@ func (q *QueryBuilderQuery[T]) validateAggregations(cfg validationConfig) error
|
||||
return nil
|
||||
}
|
||||
|
||||
func (q *QueryBuilderQuery[T]) validateLimitAndPagination(cfg validationConfig) error {
|
||||
if !cfg.skipLimitValidation {
|
||||
if q.Limit < 0 {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"limit must be non-negative, got %d",
|
||||
q.Limit,
|
||||
)
|
||||
}
|
||||
func (q *QueryBuilderQuery[T]) validateLimitAndPagination() error {
|
||||
// Validate limit
|
||||
if q.Limit < 0 {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"limit must be non-negative, got %d",
|
||||
q.Limit,
|
||||
)
|
||||
}
|
||||
|
||||
if q.Limit > MaxQueryLimit {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"limit exceeds maximum allowed value of %d",
|
||||
MaxQueryLimit,
|
||||
).WithAdditional(
|
||||
fmt.Sprintf("Provided limit: %d", q.Limit),
|
||||
)
|
||||
}
|
||||
if q.Limit > MaxQueryLimit {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"limit exceeds maximum allowed value of %d",
|
||||
MaxQueryLimit,
|
||||
).WithAdditional(
|
||||
fmt.Sprintf("Provided limit: %d", q.Limit),
|
||||
)
|
||||
}
|
||||
|
||||
// Validate offset
|
||||
@@ -347,7 +336,7 @@ func (q *QueryBuilderQuery[T]) validateSecondaryAggregations() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (q *QueryBuilderQuery[T]) validateOrderBy(cfg validationConfig) error {
|
||||
func (q *QueryBuilderQuery[T]) validateOrderBy(requestType RequestType) error {
|
||||
for i, order := range q.Order {
|
||||
// Direction validation is handled by the OrderDirection type
|
||||
if order.Direction != OrderDirectionAsc && order.Direction != OrderDirectionDesc {
|
||||
@@ -366,7 +355,8 @@ func (q *QueryBuilderQuery[T]) validateOrderBy(cfg validationConfig) error {
|
||||
}
|
||||
}
|
||||
|
||||
if !cfg.skipAggregationOrderBy {
|
||||
// aggregation-specific order key validation only applies to aggregation queries
|
||||
if requestType.IsAggregation() {
|
||||
return q.validateOrderByForAggregation()
|
||||
}
|
||||
|
||||
@@ -448,8 +438,8 @@ func (q *QueryBuilderQuery[T]) validateOrderByForAggregation() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Validate validates the entire query range request.
|
||||
func (r *QueryRangeRequest) Validate(opts ...ValidationOption) error {
|
||||
// ValidateQueryRangeRequest validates the entire query range request
|
||||
func (r *QueryRangeRequest) Validate() error {
|
||||
// Validate time range
|
||||
if r.RequestType != RequestTypeRawStream && r.Start >= r.End {
|
||||
return errors.NewInvalidInputf(
|
||||
@@ -460,10 +450,8 @@ func (r *QueryRangeRequest) Validate(opts ...ValidationOption) error {
|
||||
|
||||
// Validate request type
|
||||
switch r.RequestType {
|
||||
case RequestTypeRaw, RequestTypeRawStream, RequestTypeTrace:
|
||||
opts = append(opts, getValidationOptions(false)...)
|
||||
case RequestTypeTimeSeries, RequestTypeScalar:
|
||||
opts = append(opts, getValidationOptions(true)...)
|
||||
case RequestTypeRaw, RequestTypeRawStream, RequestTypeTimeSeries, RequestTypeScalar, RequestTypeTrace:
|
||||
// Valid request types
|
||||
default:
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
@@ -475,7 +463,7 @@ func (r *QueryRangeRequest) Validate(opts ...ValidationOption) error {
|
||||
}
|
||||
|
||||
// Validate composite query
|
||||
if err := r.CompositeQuery.Validate(opts...); err != nil {
|
||||
if err := r.validateCompositeQuery(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -490,7 +478,7 @@ func (r *QueryRangeRequest) Validate(opts ...ValidationOption) error {
|
||||
// validateAllQueriesNotDisabled validates that at least one query in the composite query is enabled
|
||||
func (r *QueryRangeRequest) validateAllQueriesNotDisabled() error {
|
||||
for _, envelope := range r.CompositeQuery.Queries {
|
||||
if !envelope.IsDisabled() {
|
||||
if !envelope.isDisabled() {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
@@ -501,8 +489,12 @@ func (r *QueryRangeRequest) validateAllQueriesNotDisabled() error {
|
||||
)
|
||||
}
|
||||
|
||||
func (r *QueryRangeRequest) validateCompositeQuery() error {
|
||||
return r.CompositeQuery.Validate(r.RequestType)
|
||||
}
|
||||
|
||||
// Validate performs validation on CompositeQuery
|
||||
func (c *CompositeQuery) Validate(opts ...ValidationOption) error {
|
||||
func (c *CompositeQuery) Validate(requestType RequestType) error {
|
||||
if len(c.Queries) == 0 {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
@@ -514,14 +506,14 @@ func (c *CompositeQuery) Validate(opts ...ValidationOption) error {
|
||||
queryNames := make(map[string]bool)
|
||||
|
||||
for i, envelope := range c.Queries {
|
||||
if err := validateQueryEnvelope(envelope, opts...); err != nil {
|
||||
if err := validateQueryEnvelope(envelope, requestType); err != nil {
|
||||
queryId := getQueryIdentifier(envelope, i)
|
||||
return wrapValidationError(err, queryId, "invalid %s: %s")
|
||||
}
|
||||
|
||||
// Check name uniqueness for builder queries
|
||||
if envelope.Type == QueryTypeBuilder || envelope.Type == QueryTypeSubQuery {
|
||||
name := envelope.GetQueryName()
|
||||
name := envelope.queryName()
|
||||
if name != "" {
|
||||
if queryNames[name] {
|
||||
return errors.NewInvalidInputf(
|
||||
@@ -538,16 +530,16 @@ func (c *CompositeQuery) Validate(opts ...ValidationOption) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateQueryEnvelope(envelope QueryEnvelope, opts ...ValidationOption) error {
|
||||
func validateQueryEnvelope(envelope QueryEnvelope, requestType RequestType) error {
|
||||
switch envelope.Type {
|
||||
case QueryTypeBuilder, QueryTypeSubQuery:
|
||||
switch spec := envelope.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Validate(opts...)
|
||||
return spec.Validate(requestType)
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Validate(opts...)
|
||||
return spec.Validate(requestType)
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Validate(opts...)
|
||||
return spec.Validate(requestType)
|
||||
default:
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
@@ -633,11 +625,3 @@ func validateQueryEnvelope(envelope QueryEnvelope, opts ...ValidationOption) err
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
func getValidationOptions(isAggregationQuery bool) []ValidationOption {
|
||||
if isAggregationQuery {
|
||||
return []ValidationOption{WithSkipSelectFieldValidation()}
|
||||
}
|
||||
return []ValidationOption{WithSkipAggregationValidation(), WithSkipHavingValidation(), WithSkipAggregationOrderBy(), WithSkipGroupByValidation()}
|
||||
|
||||
}
|
||||
|
||||
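For reference, a hedged usage sketch of the reworked entry point, assuming the querybuildertypesv5 package context; the call forms are taken from the updated tests below, and the query literal is illustrative only.

// Hedged sketch (package context assumed): callers now pass the request type
// directly instead of assembling ValidationOption values.
logQuery := QueryBuilderQuery[LogAggregation]{
	Name:   "A",
	Signal: telemetrytypes.SignalLogs,
}
if err := logQuery.Validate(RequestTypeRaw); err != nil {
	// raw request: aggregation, group-by and having checks are skipped
}
if err := logQuery.Validate(RequestTypeTimeSeries); err != nil {
	// time-series request: aggregation-oriented checks apply
}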
@@ -743,7 +743,7 @@ func TestValidateQueryEnvelope(t *testing.T) {
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
err := validateQueryEnvelope(tt.envelope)
|
||||
err := validateQueryEnvelope(tt.envelope, tt.requestType)
|
||||
if tt.wantErr {
|
||||
if err == nil {
|
||||
t.Errorf("validateQueryEnvelope() expected error but got none")
|
||||
@@ -816,7 +816,7 @@ func TestQueryEnvelope_Helpers(t *testing.T) {
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got := tt.envelope.GetQueryName()
|
||||
got := tt.envelope.queryName()
|
||||
if got != tt.want {
|
||||
t.Errorf("queryName() = %q, want %q", got, tt.want)
|
||||
}
|
||||
@@ -868,7 +868,7 @@ func TestQueryEnvelope_Helpers(t *testing.T) {
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got := tt.envelope.IsDisabled()
|
||||
got := tt.envelope.isDisabled()
|
||||
if got != tt.want {
|
||||
t.Errorf("isDisabled() = %v, want %v", got, tt.want)
|
||||
}
|
||||
@@ -1107,7 +1107,7 @@ func TestQueryRangeRequest_ValidateOrderByForAggregation(t *testing.T) {
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
err := tt.query.Validate(getValidationOptions(true)...)
|
||||
err := tt.query.Validate(RequestTypeTimeSeries)
|
||||
if tt.wantErr {
|
||||
if err == nil {
|
||||
t.Errorf("validateOrderByForAggregation() expected error but got none")
|
||||
@@ -1161,7 +1161,7 @@ func TestNonAggregationFieldsSkipped(t *testing.T) {
|
||||
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service.name"}},
|
||||
},
|
||||
}
|
||||
err := query.Validate(getValidationOptions(false)...)
|
||||
err := query.Validate(RequestTypeRaw)
|
||||
if err != nil {
|
||||
t.Errorf("expected no error for groupBy with raw request type, got: %v", err)
|
||||
}
|
||||
@@ -1178,7 +1178,7 @@ func TestNonAggregationFieldsSkipped(t *testing.T) {
|
||||
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: ""}},
|
||||
},
|
||||
}
|
||||
err := query.Validate(getValidationOptions(true)...)
|
||||
err := query.Validate(RequestTypeTimeSeries)
|
||||
if err == nil {
|
||||
t.Errorf("expected error for empty groupBy key with timeseries request type")
|
||||
}
|
||||
@@ -1190,7 +1190,7 @@ func TestNonAggregationFieldsSkipped(t *testing.T) {
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Having: &Having{Expression: "count() > 10"},
|
||||
}
|
||||
err := query.Validate(getValidationOptions(false)...)
|
||||
err := query.Validate(RequestTypeRaw)
|
||||
if err != nil {
|
||||
t.Errorf("expected no error for having with raw request type, got: %v", err)
|
||||
}
|
||||
@@ -1202,7 +1202,7 @@ func TestNonAggregationFieldsSkipped(t *testing.T) {
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Having: &Having{Expression: "count() > 10"},
|
||||
}
|
||||
err := query.Validate(getValidationOptions(false)...)
|
||||
err := query.Validate(RequestTypeTrace)
|
||||
if err != nil {
|
||||
t.Errorf("expected no error for having with trace request type, got: %v", err)
|
||||
}
|
||||
@@ -1216,7 +1216,7 @@ func TestNonAggregationFieldsSkipped(t *testing.T) {
|
||||
{Expression: "count()"},
|
||||
},
|
||||
}
|
||||
err := query.Validate(getValidationOptions(false)...)
|
||||
err := query.Validate(RequestTypeRaw)
|
||||
if err != nil {
|
||||
t.Errorf("expected no error for aggregations with raw request type, got: %v", err)
|
||||
}
|
||||
@@ -1230,7 +1230,7 @@ func TestNonAggregationFieldsSkipped(t *testing.T) {
|
||||
{Expression: "count()"},
|
||||
},
|
||||
}
|
||||
err := query.Validate(getValidationOptions(false)...)
|
||||
err := query.Validate(RequestTypeRawStream)
|
||||
if err != nil {
|
||||
t.Errorf("expected no error for aggregations with raw_stream request type, got: %v", err)
|
||||
}
|
||||
@@ -1248,12 +1248,12 @@ func TestNonAggregationFieldsSkipped(t *testing.T) {
|
||||
},
|
||||
}
|
||||
// Should error for raw (selectFields are validated)
|
||||
err := query.Validate(getValidationOptions(false)...)
|
||||
err := query.Validate(RequestTypeRaw)
|
||||
if err == nil {
|
||||
t.Errorf("expected error for isRoot in selectFields with raw request type")
|
||||
}
|
||||
// Should pass for timeseries (selectFields skipped)
|
||||
err = query.Validate(getValidationOptions(true)...)
|
||||
err = query.Validate(RequestTypeTimeSeries)
|
||||
if err != nil {
|
||||
t.Errorf("expected no error for isRoot in selectFields with timeseries request type, got: %v", err)
|
||||
}
|
||||
|
||||
@@ -12,6 +12,7 @@ type Store interface {
|
||||
GetByOrgIDAndName(context.Context, valuer.UUID, string) (*StorableRole, error)
|
||||
List(context.Context, valuer.UUID) ([]*StorableRole, error)
|
||||
ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*StorableRole, error)
|
||||
ListByOrgIDAndIDs(context.Context, valuer.UUID, []valuer.UUID) ([]*StorableRole, error)
|
||||
Update(context.Context, valuer.UUID, *StorableRole) error
|
||||
Delete(context.Context, valuer.UUID, valuer.UUID) error
|
||||
RunInTx(context.Context, func(ctx context.Context) error) error
|
||||
|
||||
@@ -1,17 +1,18 @@
|
||||
package types
|
||||
package savedviewtypes
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
type SavedView struct {
|
||||
bun.BaseModel `bun:"table:saved_views"`
|
||||
|
||||
Identifiable
|
||||
TimeAuditable
|
||||
UserAuditable
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
types.UserAuditable
|
||||
OrgID string `json:"orgId" bun:"org_id,notnull"`
|
||||
Name string `json:"name" bun:"name,type:text,notnull"`
|
||||
Category string `json:"category" bun:"category,type:text,notnull"`
|
||||
161
pkg/types/serviceaccounttypes/factor_api_key.go
Normal file
@@ -0,0 +1,161 @@
|
||||
package serviceaccounttypes
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrCodeServiceAccountFactorAPIkeyInvalidInput = errors.MustNewCode("service_account_factor_api_key_invalid_input")
|
||||
ErrCodeServiceAccountFactorAPIKeyAlreadyExists = errors.MustNewCode("service_account_factor_api_key_already_exists")
|
||||
ErrCodeServiceAccountFactorAPIKeyNotFound = errors.MustNewCode("service_account_factor_api_key_not_found")
|
||||
)
|
||||
|
||||
type StorableFactorAPIKey struct {
|
||||
bun.BaseModel `bun:"table:factor_api_key"`
|
||||
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
Name string `bun:"name"`
|
||||
Key string `bun:"key"`
|
||||
ExpiresAt uint64 `bun:"expires_at"`
|
||||
LastUsed time.Time `bun:"last_used"`
|
||||
ServiceAccountID string `bun:"service_account_id"`
|
||||
}
|
||||
|
||||
type FactorAPIKey struct {
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
Name string `json:"name" required:"true"`
|
||||
Key string `json:"key" required:"true"`
|
||||
ExpiresAt uint64 `json:"expires_at" required:"true"`
|
||||
LastUsed time.Time `json:"last_used" required:"true"`
|
||||
ServiceAccountID valuer.UUID `json:"service_account_id" required:"true"`
|
||||
}
|
||||
|
||||
type GettableFactorAPIKeyWithKey struct {
|
||||
types.Identifiable
|
||||
Key string `json:"key" required:"true"`
|
||||
}
|
||||
|
||||
type GettableFactorAPIKey struct {
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
Name string `json:"name" required:"true"`
|
||||
ExpiresAt uint64 `json:"expires_at" required:"true"`
|
||||
LastUsed time.Time `json:"last_used" required:"true"`
|
||||
ServiceAccountID valuer.UUID `json:"service_account_id" required:"true"`
|
||||
}
|
||||
|
||||
type PostableFactorAPIKey struct {
|
||||
Name string `json:"name" required:"true"`
|
||||
ExpiresAt uint64 `json:"expires_at" required:"true"`
|
||||
}
|
||||
|
||||
type UpdatableFactorAPIKey struct {
|
||||
Name string `json:"name" required:"true"`
|
||||
ExpiresAt uint64 `json:"expires_at" required:"true"`
|
||||
}
|
||||
|
||||
func NewFactorAPIKeyFromStorable(storable *StorableFactorAPIKey) *FactorAPIKey {
|
||||
return &FactorAPIKey{
|
||||
Identifiable: storable.Identifiable,
|
||||
TimeAuditable: storable.TimeAuditable,
|
||||
Name: storable.Name,
|
||||
Key: storable.Key,
|
||||
ExpiresAt: storable.ExpiresAt,
|
||||
LastUsed: storable.LastUsed,
|
||||
ServiceAccountID: valuer.MustNewUUID(storable.ServiceAccountID),
|
||||
}
|
||||
}
|
||||
|
||||
func NewFactorAPIKeyFromStorables(storables []*StorableFactorAPIKey) []*FactorAPIKey {
|
||||
factorAPIKeys := make([]*FactorAPIKey, len(storables))
|
||||
|
||||
for idx, storable := range storables {
|
||||
factorAPIKeys[idx] = NewFactorAPIKeyFromStorable(storable)
|
||||
}
|
||||
|
||||
return factorAPIKeys
|
||||
}
|
||||
|
||||
func NewStorableFactorAPIKey(factorAPIKey *FactorAPIKey) *StorableFactorAPIKey {
|
||||
return &StorableFactorAPIKey{
|
||||
Identifiable: factorAPIKey.Identifiable,
|
||||
TimeAuditable: factorAPIKey.TimeAuditable,
|
||||
Name: factorAPIKey.Name,
|
||||
Key: factorAPIKey.Key,
|
||||
ExpiresAt: factorAPIKey.ExpiresAt,
|
||||
LastUsed: factorAPIKey.LastUsed,
|
||||
ServiceAccountID: factorAPIKey.ServiceAccountID.String(),
|
||||
}
|
||||
}
|
||||
|
||||
func NewGettableFactorAPIKeys(keys []*FactorAPIKey) []*GettableFactorAPIKey {
|
||||
gettables := make([]*GettableFactorAPIKey, len(keys))
|
||||
|
||||
for idx, key := range keys {
|
||||
gettables[idx] = &GettableFactorAPIKey{
|
||||
Identifiable: key.Identifiable,
|
||||
TimeAuditable: key.TimeAuditable,
|
||||
Name: key.Name,
|
||||
ExpiresAt: key.ExpiresAt,
|
||||
LastUsed: key.LastUsed,
|
||||
ServiceAccountID: key.ServiceAccountID,
|
||||
}
|
||||
}
|
||||
|
||||
return gettables
|
||||
}
|
||||
|
||||
func NewGettableFactorAPIKeyWithKey(id valuer.UUID, key string) *GettableFactorAPIKeyWithKey {
|
||||
return &GettableFactorAPIKeyWithKey{
|
||||
Identifiable: types.Identifiable{
|
||||
ID: id,
|
||||
},
|
||||
Key: key,
|
||||
}
|
||||
}
|
||||
|
||||
func (apiKey *FactorAPIKey) Update(name string, expiresAt uint64) {
|
||||
apiKey.Name = name
|
||||
apiKey.ExpiresAt = expiresAt
|
||||
apiKey.UpdatedAt = time.Now()
|
||||
}
|
||||
|
||||
func (key *PostableFactorAPIKey) UnmarshalJSON(data []byte) error {
|
||||
type Alias PostableFactorAPIKey
|
||||
|
||||
var temp Alias
|
||||
if err := json.Unmarshal(data, &temp); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if temp.Name == "" {
|
||||
return errors.New(errors.TypeInvalidInput, ErrCodeServiceAccountFactorAPIkeyInvalidInput, "name cannot be empty")
|
||||
}
|
||||
|
||||
*key = PostableFactorAPIKey(temp)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (key *UpdatableFactorAPIKey) UnmarshalJSON(data []byte) error {
|
||||
type Alias UpdatableFactorAPIKey
|
||||
|
||||
var temp Alias
|
||||
if err := json.Unmarshal(data, &temp); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if temp.Name == "" {
|
||||
return errors.New(errors.TypeInvalidInput, ErrCodeServiceAccountFactorAPIkeyInvalidInput, "name cannot be empty")
|
||||
}
|
||||
|
||||
*key = UpdatableFactorAPIKey(temp)
|
||||
return nil
|
||||
}
|
||||
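A short usage sketch (editorial, not part of the diff) tying the types above together: decode a PostableFactorAPIKey, whose UnmarshalJSON rejects an empty name, then round-trip between the storable row and the domain type. The concrete field values and the hand-built row are illustrative only.

package main

import (
	"encoding/json"
	"fmt"
	"time"

	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/serviceaccounttypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

func main() {
	// Decode client input; the custom UnmarshalJSON above rejects an empty name.
	var postable serviceaccounttypes.PostableFactorAPIKey
	if err := json.Unmarshal([]byte(`{"name":"ci-key","expires_at":1767225600}`), &postable); err != nil {
		panic(err)
	}

	// Build a storable row by hand (illustrative values) and convert it to the
	// domain type and back; ServiceAccountID must be a valid UUID string.
	storable := &serviceaccounttypes.StorableFactorAPIKey{
		Identifiable:     types.Identifiable{ID: valuer.GenerateUUID()},
		TimeAuditable:    types.TimeAuditable{CreatedAt: time.Now(), UpdatedAt: time.Now()},
		Name:             postable.Name,
		Key:              "opaque-key-material",
		ExpiresAt:        postable.ExpiresAt,
		LastUsed:         time.Now(),
		ServiceAccountID: valuer.GenerateUUID().String(),
	}
	apiKey := serviceaccounttypes.NewFactorAPIKeyFromStorable(storable)
	fmt.Println(serviceaccounttypes.NewStorableFactorAPIKey(apiKey).Name)
}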
pkg/types/serviceaccounttypes/service_acccount.go (new file, 253 lines)
@@ -0,0 +1,253 @@
package serviceaccounttypes

import (
	"crypto/rand"
	"encoding/base64"
	"encoding/json"
	"slices"
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/uptrace/bun"
)

var (
	ErrCodeServiceAccountInvalidInput = errors.MustNewCode("service_account_invalid_input")
	ErrCodeServiceAccountAlreadyExists = errors.MustNewCode("service_account_already_exists")
	ErrCodeServiceAccountNotFound = errors.MustNewCode("service_account_not_found")
	ErrCodeServiceAccountRoleAlreadyExists = errors.MustNewCode("service_account_role_already_exists")
)

var (
	StatusActive = valuer.NewString("active")
	StatusDisabled = valuer.NewString("disabled")
	ValidStatus = []valuer.String{StatusActive, StatusDisabled}
)

type StorableServiceAccount struct {
	bun.BaseModel `bun:"table:service_account,alias:service_account"`

	types.Identifiable
	types.TimeAuditable
	Name string `bun:"name"`
	Email string `bun:"email"`
	Status valuer.String `bun:"status"`
	OrgID string `bun:"org_id"`
}

type ServiceAccount struct {
	types.Identifiable
	types.TimeAuditable
	Name string `json:"name" required:"true"`
	Email valuer.Email `json:"email" required:"true"`
	Roles []string `json:"roles" required:"true" nullable:"false"`
	Status valuer.String `json:"status" required:"true"`
	OrgID valuer.UUID `json:"orgID" required:"true"`
}

type PostableServiceAccount struct {
	Name string `json:"name" required:"true"`
	Email valuer.Email `json:"email" required:"true"`
	Roles []string `json:"roles" required:"true" nullable:"false"`
}

type UpdatableServiceAccount struct {
	Name string `json:"name" required:"true"`
	Email valuer.Email `json:"email" required:"true"`
	Roles []string `json:"roles" required:"true" nullable:"false"`
}

type UpdatableServiceAccountStatus struct {
	Status valuer.String `json:"status" required:"true"`
}

func NewServiceAccount(name string, email valuer.Email, roles []string, status valuer.String, orgID valuer.UUID) *ServiceAccount {
	return &ServiceAccount{
		Identifiable: types.Identifiable{
			ID: valuer.GenerateUUID(),
		},
		TimeAuditable: types.TimeAuditable{
			CreatedAt: time.Now(),
			UpdatedAt: time.Now(),
		},
		Name: name,
		Email: email,
		Roles: roles,
		Status: status,
		OrgID: orgID,
	}
}

func NewServiceAccountFromStorables(storableServiceAccount *StorableServiceAccount, roles []string) *ServiceAccount {
	return &ServiceAccount{
		Identifiable: storableServiceAccount.Identifiable,
		TimeAuditable: storableServiceAccount.TimeAuditable,
		Name: storableServiceAccount.Name,
		Email: valuer.MustNewEmail(storableServiceAccount.Email),
		Roles: roles,
		Status: storableServiceAccount.Status,
		OrgID: valuer.MustNewUUID(storableServiceAccount.OrgID),
	}
}

func NewServiceAccountsFromRoles(storableServiceAccounts []*StorableServiceAccount, roles []*roletypes.Role, serviceAccountIDToRoleIDsMap map[string][]valuer.UUID) []*ServiceAccount {
	serviceAccounts := make([]*ServiceAccount, 0, len(storableServiceAccounts))

	roleIDToRole := make(map[string]*roletypes.Role, len(roles))
	for _, role := range roles {
		roleIDToRole[role.ID.String()] = role
	}

	for _, sa := range storableServiceAccounts {
		roleIDs := serviceAccountIDToRoleIDsMap[sa.ID.String()]

		roleNames := make([]string, len(roleIDs))
		for idx, rid := range roleIDs {
			if role, ok := roleIDToRole[rid.String()]; ok {
				roleNames[idx] = role.Name
			}
		}

		account := NewServiceAccountFromStorables(sa, roleNames)
		serviceAccounts = append(serviceAccounts, account)
	}

	return serviceAccounts
}

func NewStorableServiceAccount(serviceAccount *ServiceAccount) *StorableServiceAccount {
	return &StorableServiceAccount{
		Identifiable: serviceAccount.Identifiable,
		TimeAuditable: serviceAccount.TimeAuditable,
		Name: serviceAccount.Name,
		Email: serviceAccount.Email.String(),
		Status: serviceAccount.Status,
		OrgID: serviceAccount.OrgID.String(),
	}
}

func (sa *ServiceAccount) Update(name string, email valuer.Email, roles []string) {
	sa.Name = name
	sa.Email = email
	sa.Roles = roles
	sa.UpdatedAt = time.Now()
}

func (sa *ServiceAccount) UpdateStatus(status valuer.String) {
	sa.Status = status
	sa.UpdatedAt = time.Now()
}

func (sa *ServiceAccount) NewFactorAPIKey(name string, expiresAt uint64) (*FactorAPIKey, error) {
	key := make([]byte, 32)
	_, err := rand.Read(key)
	if err != nil {
		return nil, errors.New(errors.TypeInternal, errors.CodeInternal, "failed to generate token")
	}
	// Encode the token in base64.
	encodedKey := base64.StdEncoding.EncodeToString(key)

	return &FactorAPIKey{
		Identifiable: types.Identifiable{
			ID: valuer.GenerateUUID(),
		},
		TimeAuditable: types.TimeAuditable{
			CreatedAt: time.Now(),
			UpdatedAt: time.Now(),
		},
		Name: name,
		Key: encodedKey,
		ExpiresAt: expiresAt,
		LastUsed: time.Now(),
		ServiceAccountID: sa.ID,
	}, nil
}

func (sa *ServiceAccount) PatchRoles(input *ServiceAccount) ([]string, []string) {
	currentRolesSet := make(map[string]struct{}, len(sa.Roles))
	inputRolesSet := make(map[string]struct{}, len(input.Roles))

	for _, role := range sa.Roles {
		currentRolesSet[role] = struct{}{}
	}
	for _, role := range input.Roles {
		inputRolesSet[role] = struct{}{}
	}

	// additions: roles present in input but not in current
	additions := []string{}
	for _, role := range input.Roles {
		if _, exists := currentRolesSet[role]; !exists {
			additions = append(additions, role)
		}
	}

	// deletions: roles present in current but not in input
	deletions := []string{}
	for _, role := range sa.Roles {
		if _, exists := inputRolesSet[role]; !exists {
			deletions = append(deletions, role)
		}
	}

	return additions, deletions
}

func (sa *PostableServiceAccount) UnmarshalJSON(data []byte) error {
	type Alias PostableServiceAccount

	var temp Alias
	if err := json.Unmarshal(data, &temp); err != nil {
		return err
	}

	if temp.Name == "" {
		return errors.New(errors.TypeInvalidInput, ErrCodeServiceAccountInvalidInput, "name cannot be empty")
	}

	if len(temp.Roles) == 0 {
		return errors.New(errors.TypeInvalidInput, ErrCodeServiceAccountInvalidInput, "roles cannot be empty")
	}

	*sa = PostableServiceAccount(temp)
	return nil
}

func (sa *UpdatableServiceAccount) UnmarshalJSON(data []byte) error {
	type Alias UpdatableServiceAccount

	var temp Alias
	if err := json.Unmarshal(data, &temp); err != nil {
		return err
	}

	if temp.Name == "" {
		return errors.New(errors.TypeInvalidInput, ErrCodeServiceAccountInvalidInput, "name cannot be empty")
	}

	if len(temp.Roles) == 0 {
		return errors.New(errors.TypeInvalidInput, ErrCodeServiceAccountInvalidInput, "roles cannot be empty")
	}

	*sa = UpdatableServiceAccount(temp)
	return nil
}

func (sa *UpdatableServiceAccountStatus) UnmarshalJSON(data []byte) error {
	type Alias UpdatableServiceAccountStatus

	var temp Alias
	if err := json.Unmarshal(data, &temp); err != nil {
		return err
	}

	if !slices.Contains(ValidStatus, temp.Status) {
		return errors.Newf(errors.TypeInvalidInput, ErrCodeServiceAccountInvalidInput, "invalid status: %s, allowed status are: %v", temp.Status, ValidStatus)
	}

	*sa = UpdatableServiceAccountStatus(temp)
	return nil
}
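An editorial sketch (not part of the diff) of the ServiceAccount lifecycle defined above: construct an account, mint an API key bound to it, and diff its role set with PatchRoles. Names and values are illustrative.

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/types/serviceaccounttypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

func main() {
	// Create an active service account in some org (values are illustrative).
	sa := serviceaccounttypes.NewServiceAccount(
		"ci-bot",
		valuer.MustNewEmail("ci-bot@example.com"),
		[]string{"viewer"},
		serviceaccounttypes.StatusActive,
		valuer.GenerateUUID(),
	)

	// Mint a random, base64-encoded API key bound to this account.
	apiKey, err := sa.NewFactorAPIKey("deploy-key", 1767225600)
	if err != nil {
		panic(err)
	}
	fmt.Println("key id:", apiKey.ID.String())

	// PatchRoles returns the role names to add and the ones to remove when the
	// account is updated with a new role set.
	updated := *sa
	updated.Roles = []string{"viewer", "editor"}
	additions, deletions := sa.PatchRoles(&updated)
	fmt.Println("add:", additions, "remove:", deletions) // add: [editor] remove: []
}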
pkg/types/serviceaccounttypes/service_account_role.go (new file, 81 lines)
@@ -0,0 +1,81 @@
package serviceaccounttypes

import (
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/uptrace/bun"
)

type StorableServiceAccountRole struct {
	bun.BaseModel `bun:"table:service_account_role,alias:service_account_role"`

	types.Identifiable
	types.TimeAuditable
	ServiceAccountID string `bun:"service_account_id"`
	RoleID string `bun:"role_id"`
}

func NewStorableServiceAccountRoles(serviceAccountID valuer.UUID, roles []*roletypes.Role) []*StorableServiceAccountRole {
	storableServiceAccountRoles := make([]*StorableServiceAccountRole, len(roles))
	for idx, role := range roles {
		storableServiceAccountRoles[idx] = &StorableServiceAccountRole{
			Identifiable: types.Identifiable{
				ID: valuer.GenerateUUID(),
			},
			TimeAuditable: types.TimeAuditable{
				CreatedAt: time.Now(),
				UpdatedAt: time.Now(),
			},
			ServiceAccountID: serviceAccountID.String(),
			RoleID: role.ID.String(),
		}
	}

	return storableServiceAccountRoles
}

func NewRolesFromStorableServiceAccountRoles(storable []*StorableServiceAccountRole, roles []*roletypes.Role) ([]string, error) {
	roleIDToName := make(map[string]string, len(roles))
	for _, role := range roles {
		roleIDToName[role.ID.String()] = role.Name
	}

	names := make([]string, 0, len(storable))
	for _, sar := range storable {
		roleName, ok := roleIDToName[sar.RoleID]
		if !ok {
			return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "role id %s not found in provided roles", sar.RoleID)
		}
		names = append(names, roleName)
	}

	return names, nil
}

func GetUniqueRolesAndServiceAccountMapping(storableServiceAccountRoles []*StorableServiceAccountRole) (map[string][]valuer.UUID, []valuer.UUID) {
	serviceAccountIDRoles := make(map[string][]valuer.UUID)
	uniqueRoleIDSet := make(map[string]struct{})

	for _, sar := range storableServiceAccountRoles {
		saID := sar.ServiceAccountID
		roleID := sar.RoleID
		if _, ok := serviceAccountIDRoles[saID]; !ok {
			serviceAccountIDRoles[saID] = make([]valuer.UUID, 0)
		}

		roleUUID := valuer.MustNewUUID(roleID)
		serviceAccountIDRoles[saID] = append(serviceAccountIDRoles[saID], roleUUID)
		uniqueRoleIDSet[roleID] = struct{}{}
	}

	roleIDs := make([]valuer.UUID, 0, len(uniqueRoleIDSet))
	for rid := range uniqueRoleIDSet {
		roleIDs = append(roleIDs, valuer.MustNewUUID(rid))
	}

	return serviceAccountIDRoles, roleIDs
}
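An editorial sketch (not part of the diff) of GetUniqueRolesAndServiceAccountMapping: two hand-built join rows for one service account are collapsed into a service-account-to-role-IDs map plus the de-duplicated role IDs to look up. All values are illustrative.

package main

import (
	"fmt"
	"time"

	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/serviceaccounttypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

func main() {
	saID := valuer.GenerateUUID().String()

	// Two role bindings for the same service account.
	rows := make([]*serviceaccounttypes.StorableServiceAccountRole, 0, 2)
	for i := 0; i < 2; i++ {
		rows = append(rows, &serviceaccounttypes.StorableServiceAccountRole{
			Identifiable:     types.Identifiable{ID: valuer.GenerateUUID()},
			TimeAuditable:    types.TimeAuditable{CreatedAt: time.Now(), UpdatedAt: time.Now()},
			ServiceAccountID: saID,
			RoleID:           valuer.GenerateUUID().String(),
		})
	}

	// Collapse the rows: per-account role IDs plus the unique role IDs to fetch.
	mapping, uniqueRoleIDs := serviceaccounttypes.GetUniqueRolesAndServiceAccountMapping(rows)
	fmt.Println(len(mapping[saID]), len(uniqueRoleIDs)) // 2 2
}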
pkg/types/serviceaccounttypes/store.go (new file, 33 lines)
@@ -0,0 +1,33 @@
package serviceaccounttypes

import (
	"context"

	"github.com/SigNoz/signoz/pkg/valuer"
)

type Store interface {
	// Service Account
	Create(context.Context, *StorableServiceAccount) error
	Get(context.Context, valuer.UUID, valuer.UUID) (*StorableServiceAccount, error)
	GetByID(context.Context, valuer.UUID) (*StorableServiceAccount, error)
	List(context.Context, valuer.UUID) ([]*StorableServiceAccount, error)
	Update(context.Context, valuer.UUID, *StorableServiceAccount) error
	Delete(context.Context, valuer.UUID, valuer.UUID) error

	// Service Account Role
	CreateServiceAccountRoles(context.Context, []*StorableServiceAccountRole) error
	GetServiceAccountRoles(context.Context, valuer.UUID) ([]*StorableServiceAccountRole, error)
	ListServiceAccountRolesByOrgID(context.Context, valuer.UUID) ([]*StorableServiceAccountRole, error)
	DeleteServiceAccountRoles(context.Context, valuer.UUID) error

	// Service Account Factor API Key
	CreateFactorAPIKey(context.Context, *StorableFactorAPIKey) error
	GetFactorAPIKey(context.Context, valuer.UUID, valuer.UUID) (*StorableFactorAPIKey, error)
	ListFactorAPIKey(context.Context, valuer.UUID) ([]*StorableFactorAPIKey, error)
	UpdateFactorAPIKey(context.Context, valuer.UUID, *StorableFactorAPIKey) error
	RevokeFactorAPIKey(context.Context, valuer.UUID, valuer.UUID) error
	RevokeAllFactorAPIKeys(context.Context, valuer.UUID) error

	RunInTx(context.Context, func(context.Context) error) error
}
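The Store interface above leaves persistence to an implementation elsewhere in the PR. A hedged sketch of how a caller could use RunInTx to create a service account and its role bindings atomically; the package name and helper are assumptions, only the interface methods and constructors come from the files above.

package serviceaccountmodule // hypothetical caller package

import (
	"context"

	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/SigNoz/signoz/pkg/types/serviceaccounttypes"
)

// createWithRoles persists a service account and its role bindings in one transaction.
func createWithRoles(ctx context.Context, store serviceaccounttypes.Store, sa *serviceaccounttypes.ServiceAccount, roles []*roletypes.Role) error {
	return store.RunInTx(ctx, func(ctx context.Context) error {
		if err := store.Create(ctx, serviceaccounttypes.NewStorableServiceAccount(sa)); err != nil {
			return err
		}
		return store.CreateServiceAccountRoles(ctx, serviceaccounttypes.NewStorableServiceAccountRoles(sa.ID, roles))
	})
}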
templates/email/api_key_event.gotmpl (new file, 88 lines)
@@ -0,0 +1,88 @@
<!DOCTYPE html>
<html lang="en">

<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>{{.subject}}</title>
</head>

<body style="margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,'Helvetica Neue',Arial,sans-serif;line-height:1.6;color:#333;background:#fff">
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="background:#fff">
<tr>
<td align="center" style="padding:0">
<table role="presentation" width="600" cellspacing="0" cellpadding="0" border="0" style="max-width:600px;width:100%">
{{ if .format.Header.Enabled }}
<tr>
<td align="center" style="padding:16px 20px 16px">
<img src="{{.format.Header.LogoURL}}" alt="SigNoz" width="160" height="40" style="display:block;border:0;outline:none;max-width:100%;height:auto">
</td>
</tr>
{{ end }}
<tr>
<td style="padding:16px 20px 16px">
<p style="margin:0 0 16px;font-size:16px;color:#333">
Hi there,
</p>
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
An API key was {{.Event}} for your service account <strong>{{.Name}}</strong>.
</p>
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
<tr>
<td style="padding:20px;background:#f5f5f5;border-radius:6px;border-left:4px solid #4E74F8">
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0">
<tr>
<td style="padding:0 0 8px">
<p style="margin:0;font-size:15px;color:#333;line-height:1.6">
<strong>Key ID:</strong> {{.KeyID}}
</p>
</td>
</tr>
<tr>
<td style="padding:0 0 8px">
<p style="margin:0;font-size:15px;color:#333;line-height:1.6">
<strong>Key Name:</strong> {{.KeyName}}
</p>
</td>
</tr>
<tr>
<td style="padding:0 0 8px">
<p style="margin:0;font-size:15px;color:#333;line-height:1.6">
<strong>Created At:</strong> {{.KeyCreatedAt}}
</p>
</td>
</tr>
</table>
</td>
</tr>
</table>
{{ if .format.Help.Enabled }}
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
Need help? Chat with our team in the SigNoz application or email us at <a href="mailto:{{.format.Help.Email}}" style="color:#4E74F8;text-decoration:none">{{.format.Help.Email}}</a>.
</p>
{{ end }}
<p style="margin:0;font-size:16px;color:#333;line-height:1.6">
Thanks,<br><strong>The SigNoz Team</strong>
</p>
</td>
</tr>
{{ if .format.Footer.Enabled }}
<tr>
<td align="center" style="padding:8px 16px 8px">
<p style="margin:0 0 8px;font-size:12px;color:#999;line-height:1.5">
<a href="https://signoz.io/terms-of-service/" style="color:#4E74F8;text-decoration:none">Terms of Service</a> - <a href="https://signoz.io/privacy/" style="color:#4E74F8;text-decoration:none">Privacy Policy</a>
</p>
<p style="margin:0;font-size:12px;color:#999;line-height:1.5">
© 2026 SigNoz Inc.
</p>
</td>
</tr>
{{ end }}
</table>
</td>
</tr>
</table>
</body>

</html>
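The template above expects .subject, .Event, .Name, .KeyID, .KeyName, .KeyCreatedAt plus the .format.Header, .format.Help, and .format.Footer toggles. A minimal rendering sketch with the Go standard library follows; how SigNoz actually loads and executes its email templates is not shown in this diff, so the map shape and file-path handling here are assumptions.

package main

import (
	"html/template"
	"os"
)

func main() {
	// Parse the template file added above (path relative to the repo root).
	tmpl := template.Must(template.ParseFiles("templates/email/api_key_event.gotmpl"))

	// Illustrative data mirroring the placeholders used in the template.
	data := map[string]any{
		"subject":      "API key created",
		"Event":        "created",
		"Name":         "ci-bot",
		"KeyID":        "0195c7a1-0000-0000-0000-000000000000",
		"KeyName":      "deploy-key",
		"KeyCreatedAt": "2025-01-01 10:00 UTC",
		"format": map[string]any{
			"Header": map[string]any{"Enabled": true, "LogoURL": "https://example.com/logo.png"},
			"Help":   map[string]any{"Enabled": true, "Email": "support@example.com"},
			"Footer": map[string]any{"Enabled": true},
		},
	}

	// Render the HTML email to stdout.
	if err := tmpl.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}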
@@ -1,634 +0,0 @@
|
||||
import csv
|
||||
import io
|
||||
import json
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from http import HTTPStatus
|
||||
from typing import Callable, List
|
||||
from urllib.parse import urlencode
|
||||
|
||||
import requests
|
||||
|
||||
from fixtures import types
|
||||
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
|
||||
from fixtures.logs import Logs
|
||||
|
||||
|
||||
def test_export_logs_csv(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
insert_logs: Callable[[List[Logs]], None],
|
||||
) -> None:
|
||||
"""
|
||||
Setup:
|
||||
Insert 3 logs with different severity levels and attributes.
|
||||
|
||||
Tests:
|
||||
1. Export logs as CSV format
|
||||
2. Verify CSV structure and content
|
||||
3. Validate headers are present
|
||||
4. Check log data is correctly formatted
|
||||
"""
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
|
||||
insert_logs(
|
||||
[
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=10),
|
||||
body="Application started successfully",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "api-service",
|
||||
"deployment.environment": "production",
|
||||
"host.name": "server-01",
|
||||
},
|
||||
attributes={
|
||||
"http.method": "GET",
|
||||
"http.status_code": 200,
|
||||
"user.id": "user123",
|
||||
},
|
||||
),
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=8),
|
||||
body="Connection to database failed",
|
||||
severity_text="ERROR",
|
||||
resources={
|
||||
"service.name": "api-service",
|
||||
"deployment.environment": "production",
|
||||
"host.name": "server-01",
|
||||
},
|
||||
attributes={
|
||||
"error.type": "ConnectionError",
|
||||
"db.name": "production_db",
|
||||
},
|
||||
),
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=5),
|
||||
body="Request processed",
|
||||
severity_text="DEBUG",
|
||||
resources={
|
||||
"service.name": "worker-service",
|
||||
"deployment.environment": "production",
|
||||
"host.name": "server-02",
|
||||
},
|
||||
attributes={
|
||||
"request.id": "req-456",
|
||||
"duration_ms": 150.5,
|
||||
},
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Calculate timestamps in nanoseconds
|
||||
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
|
||||
end_ns = int(now.timestamp() * 1e9)
|
||||
|
||||
params = {
|
||||
"start": start_ns,
|
||||
"end": end_ns,
|
||||
}
|
||||
|
||||
# Export logs as CSV (default format, no source needed)
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(
|
||||
f"/api/v1/export_raw_data?{urlencode(params)}"
|
||||
),
|
||||
timeout=30,
|
||||
headers={
|
||||
"authorization": f"Bearer {token}",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.headers["Content-Type"] == "text/csv"
|
||||
assert "attachment" in response.headers.get("Content-Disposition", "")
|
||||
|
||||
# Parse CSV content
|
||||
csv_content = response.text
|
||||
csv_reader = csv.DictReader(io.StringIO(csv_content))
|
||||
|
||||
rows = list(csv_reader)
|
||||
assert len(rows) == 3, f"Expected 3 rows, got {len(rows)}"
|
||||
|
||||
# Verify log bodies are present in the exported data
|
||||
bodies = [row.get("body") for row in rows]
|
||||
assert "Application started successfully" in bodies
|
||||
assert "Connection to database failed" in bodies
|
||||
assert "Request processed" in bodies
|
||||
|
||||
# Verify severity levels
|
||||
severities = [row.get("severity_text") for row in rows]
|
||||
assert "INFO" in severities
|
||||
assert "ERROR" in severities
|
||||
assert "DEBUG" in severities
|
||||
|
||||
|
||||
def test_export_logs_jsonl(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
insert_logs: Callable[[List[Logs]], None],
|
||||
) -> None:
|
||||
"""
|
||||
Setup:
|
||||
Insert 2 logs with different attributes.
|
||||
|
||||
Tests:
|
||||
1. Export logs as JSONL format
|
||||
2. Verify JSONL structure and content
|
||||
3. Check each line is valid JSON
|
||||
4. Validate log data is correctly formatted
|
||||
"""
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
|
||||
insert_logs(
|
||||
[
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=10),
|
||||
body="User logged in",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "auth-service",
|
||||
"deployment.environment": "staging",
|
||||
},
|
||||
attributes={
|
||||
"user.email": "test@example.com",
|
||||
"session.id": "sess-789",
|
||||
},
|
||||
),
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=5),
|
||||
body="Payment processed successfully",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "payment-service",
|
||||
"deployment.environment": "staging",
|
||||
},
|
||||
attributes={
|
||||
"transaction.id": "txn-123",
|
||||
"amount": 99.99,
|
||||
},
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Calculate timestamps in nanoseconds
|
||||
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
|
||||
end_ns = int(now.timestamp() * 1e9)
|
||||
|
||||
params = {
|
||||
"start": start_ns,
|
||||
"end": end_ns,
|
||||
"format": "jsonl",
|
||||
"source": "logs",
|
||||
}
|
||||
|
||||
# Export logs as JSONL
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(
|
||||
f"/api/v1/export_raw_data?{urlencode(params)}"
|
||||
),
|
||||
timeout=10,
|
||||
headers={
|
||||
"authorization": f"Bearer {token}",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.headers["Content-Type"] == "application/x-ndjson"
|
||||
assert "attachment" in response.headers.get("Content-Disposition", "")
|
||||
|
||||
# Parse JSONL content
|
||||
jsonl_lines = response.text.strip().split("\n")
|
||||
assert len(jsonl_lines) == 2, f"Expected 2 lines, got {len(jsonl_lines)}"
|
||||
|
||||
# Verify each line is valid JSON
|
||||
json_objects = []
|
||||
for line in jsonl_lines:
|
||||
obj = json.loads(line)
|
||||
json_objects.append(obj)
|
||||
assert "id" in obj
|
||||
assert "timestamp" in obj
|
||||
assert "body" in obj
|
||||
assert "severity_text" in obj
|
||||
|
||||
# Verify log bodies
|
||||
bodies = [obj.get("body") for obj in json_objects]
|
||||
assert "User logged in" in bodies
|
||||
assert "Payment processed successfully" in bodies
|
||||
|
||||
|
||||
def test_export_logs_with_filter(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
insert_logs: Callable[[List[Logs]], None],
|
||||
) -> None:
|
||||
"""
|
||||
Setup:
|
||||
Insert logs with different severity levels.
|
||||
|
||||
Tests:
|
||||
1. Export logs with filter applied
|
||||
2. Verify only filtered logs are returned
|
||||
"""
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
|
||||
insert_logs(
|
||||
[
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=10),
|
||||
body="Info message",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "test-service",
|
||||
},
|
||||
attributes={},
|
||||
),
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=8),
|
||||
body="Error message",
|
||||
severity_text="ERROR",
|
||||
resources={
|
||||
"service.name": "test-service",
|
||||
},
|
||||
attributes={},
|
||||
),
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=5),
|
||||
body="Another error message",
|
||||
severity_text="ERROR",
|
||||
resources={
|
||||
"service.name": "test-service",
|
||||
},
|
||||
attributes={},
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Calculate timestamps in nanoseconds
|
||||
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
|
||||
end_ns = int(now.timestamp() * 1e9)
|
||||
|
||||
params = {
|
||||
"start": start_ns,
|
||||
"end": end_ns,
|
||||
"format": "jsonl",
|
||||
"source": "logs",
|
||||
"filter": "severity_text = 'ERROR'",
|
||||
}
|
||||
|
||||
# Export logs with filter
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(
|
||||
f"/api/v1/export_raw_data?{urlencode(params)}"
|
||||
),
|
||||
timeout=10,
|
||||
headers={
|
||||
"authorization": f"Bearer {token}",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.headers["Content-Type"] == "application/x-ndjson"
|
||||
|
||||
# Parse JSONL content
|
||||
jsonl_lines = response.text.strip().split("\n")
|
||||
assert len(jsonl_lines) == 2, f"Expected 2 lines (filtered), got {len(jsonl_lines)}"
|
||||
|
||||
# Verify only ERROR logs are returned
|
||||
for line in jsonl_lines:
|
||||
obj = json.loads(line)
|
||||
assert obj["severity_text"] == "ERROR"
|
||||
assert "error message" in obj["body"].lower()
|
||||
|
||||
|
||||
def test_export_logs_with_limit(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
insert_logs: Callable[[List[Logs]], None],
|
||||
) -> None:
|
||||
"""
|
||||
Setup:
|
||||
Insert 5 logs.
|
||||
|
||||
Tests:
|
||||
1. Export logs with limit applied
|
||||
2. Verify only limited number of logs are returned
|
||||
"""
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
|
||||
logs = []
|
||||
for i in range(5):
|
||||
logs.append(
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=i),
|
||||
body=f"Log message {i}",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "test-service",
|
||||
},
|
||||
attributes={
|
||||
"index": i,
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
insert_logs(logs)
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Calculate timestamps in nanoseconds
|
||||
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
|
||||
end_ns = int(now.timestamp() * 1e9)
|
||||
|
||||
params = {
|
||||
"start": start_ns,
|
||||
"end": end_ns,
|
||||
"format": "csv",
|
||||
"source": "logs",
|
||||
"limit": 3,
|
||||
}
|
||||
|
||||
# Export logs with limit
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(
|
||||
f"/api/v1/export_raw_data?{urlencode(params)}"
|
||||
),
|
||||
timeout=10,
|
||||
headers={
|
||||
"authorization": f"Bearer {token}",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.headers["Content-Type"] == "text/csv"
|
||||
|
||||
# Parse CSV content
|
||||
csv_content = response.text
|
||||
csv_reader = csv.DictReader(io.StringIO(csv_content))
|
||||
|
||||
rows = list(csv_reader)
|
||||
assert len(rows) == 3, f"Expected 3 rows (limited), got {len(rows)}"
|
||||
|
||||
|
||||
def test_export_logs_with_columns(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
insert_logs: Callable[[List[Logs]], None],
|
||||
) -> None:
|
||||
"""
|
||||
Setup:
|
||||
Insert logs with various attributes.
|
||||
|
||||
Tests:
|
||||
1. Export logs with specific columns
|
||||
2. Verify only specified columns are returned
|
||||
"""
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
|
||||
insert_logs(
|
||||
[
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=10),
|
||||
body="Test log message",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "test-service",
|
||||
"deployment.environment": "production",
|
||||
},
|
||||
attributes={
|
||||
"http.method": "GET",
|
||||
"http.status_code": 200,
|
||||
},
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Calculate timestamps in nanoseconds
|
||||
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
|
||||
end_ns = int(now.timestamp() * 1e9)
|
||||
|
||||
# Request only specific columns
|
||||
params = {
|
||||
"start": start_ns,
|
||||
"end": end_ns,
|
||||
"format": "csv",
|
||||
"source": "logs",
|
||||
"columns": ["timestamp", "severity_text", "body"],
|
||||
}
|
||||
|
||||
# Export logs with specific columns
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(
|
||||
f"/api/v1/export_raw_data?{urlencode(params, doseq=True)}"
|
||||
),
|
||||
timeout=10,
|
||||
headers={
|
||||
"authorization": f"Bearer {token}",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.headers["Content-Type"] == "text/csv"
|
||||
|
||||
# Parse CSV content
|
||||
csv_content = response.text
|
||||
csv_reader = csv.DictReader(io.StringIO(csv_content))
|
||||
|
||||
rows = list(csv_reader)
|
||||
assert len(rows) == 1
|
||||
|
||||
# Verify the specified columns are present
|
||||
row = rows[0]
|
||||
assert "timestamp" in row
|
||||
assert "severity_text" in row
|
||||
assert "body" in row
|
||||
assert row["severity_text"] == "INFO"
|
||||
assert row["body"] == "Test log message"
|
||||
|
||||
|
||||
def test_export_logs_with_order_by(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
insert_logs: Callable[[List[Logs]], None],
|
||||
) -> None:
|
||||
"""
|
||||
Setup:
|
||||
Insert logs at different timestamps.
|
||||
|
||||
Tests:
|
||||
1. Export logs with ascending timestamp order
|
||||
2. Verify logs are returned in correct order
|
||||
"""
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
|
||||
insert_logs(
|
||||
[
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=10),
|
||||
body="First log",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "test-service",
|
||||
},
|
||||
attributes={},
|
||||
),
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=5),
|
||||
body="Second log",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "test-service",
|
||||
},
|
||||
attributes={},
|
||||
),
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=1),
|
||||
body="Third log",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "test-service",
|
||||
},
|
||||
attributes={},
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Calculate timestamps in nanoseconds
|
||||
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
|
||||
end_ns = int(now.timestamp() * 1e9)
|
||||
|
||||
params = {
|
||||
"start": start_ns,
|
||||
"end": end_ns,
|
||||
"format": "jsonl",
|
||||
"source": "logs",
|
||||
"order_by": "timestamp:asc",
|
||||
}
|
||||
|
||||
# Export logs with ascending order
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(
|
||||
f"/api/v1/export_raw_data?{urlencode(params)}"
|
||||
),
|
||||
timeout=10,
|
||||
headers={
|
||||
"authorization": f"Bearer {token}",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.headers["Content-Type"] == "application/x-ndjson"
|
||||
|
||||
# Parse JSONL content
|
||||
jsonl_lines = response.text.strip().split("\n")
|
||||
assert len(jsonl_lines) == 3
|
||||
|
||||
# Verify order - first log should be "First log" (oldest)
|
||||
json_objects = [json.loads(line) for line in jsonl_lines]
|
||||
assert json_objects[0]["body"] == "First log"
|
||||
assert json_objects[1]["body"] == "Second log"
|
||||
assert json_objects[2]["body"] == "Third log"
|
||||
|
||||
|
||||
def test_export_logs_with_complex_filter(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
insert_logs: Callable[[List[Logs]], None],
|
||||
) -> None:
|
||||
"""
|
||||
Setup:
|
||||
Insert logs with various service names and severity levels.
|
||||
|
||||
Tests:
|
||||
1. Export logs with complex filter (multiple conditions)
|
||||
2. Verify only logs matching all conditions are returned
|
||||
"""
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
|
||||
insert_logs(
|
||||
[
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=10),
|
||||
body="API error occurred",
|
||||
severity_text="ERROR",
|
||||
resources={
|
||||
"service.name": "api-service",
|
||||
},
|
||||
attributes={},
|
||||
),
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=8),
|
||||
body="Worker info message",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "worker-service",
|
||||
},
|
||||
attributes={},
|
||||
),
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=5),
|
||||
body="API info message",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "api-service",
|
||||
},
|
||||
attributes={},
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Calculate timestamps in nanoseconds
|
||||
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
|
||||
end_ns = int(now.timestamp() * 1e9)
|
||||
|
||||
# Filter for api-service AND ERROR severity
|
||||
params = {
|
||||
"start": start_ns,
|
||||
"end": end_ns,
|
||||
"format": "jsonl",
|
||||
"source": "logs",
|
||||
"filter": "service.name = 'api-service' AND severity_text = 'ERROR'",
|
||||
}
|
||||
|
||||
# Export logs with complex filter
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(
|
||||
f"/api/v1/export_raw_data?{urlencode(params)}"
|
||||
),
|
||||
timeout=10,
|
||||
headers={
|
||||
"authorization": f"Bearer {token}",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.headers["Content-Type"] == "application/x-ndjson"
|
||||
|
||||
# Parse JSONL content
|
||||
jsonl_lines = response.text.strip().split("\n")
|
||||
assert (
|
||||
len(jsonl_lines) == 1
|
||||
), f"Expected 1 line (complex filter), got {len(jsonl_lines)}"
|
||||
|
||||
# Verify the filtered log
|
||||
filtered_obj = json.loads(jsonl_lines[0])
|
||||
assert filtered_obj["body"] == "API error occurred"
|
||||
assert filtered_obj["severity_text"] == "ERROR"
|
||||
@@ -1,782 +0,0 @@
|
||||
import csv
|
||||
import io
|
||||
import json
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from http import HTTPStatus
|
||||
from typing import Callable, List
|
||||
from urllib.parse import urlencode
|
||||
|
||||
import requests
|
||||
|
||||
from fixtures import types
|
||||
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
|
||||
from fixtures.traces import TraceIdGenerator, Traces, TracesKind, TracesStatusCode
|
||||
|
||||
|
||||
def test_export_traces_csv(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
insert_traces: Callable[[List[Traces]], None],
|
||||
) -> None:
|
||||
"""
|
||||
Setup:
|
||||
Insert 3 traces with different attributes.
|
||||
|
||||
Tests:
|
||||
1. Export traces as CSV format
|
||||
2. Verify CSV structure and content
|
||||
3. Validate headers are present
|
||||
4. Check trace data is correctly formatted
|
||||
"""
|
||||
http_service_trace_id = TraceIdGenerator.trace_id()
|
||||
http_service_span_id = TraceIdGenerator.span_id()
|
||||
http_service_db_span_id = TraceIdGenerator.span_id()
|
||||
topic_service_trace_id = TraceIdGenerator.trace_id()
|
||||
topic_service_span_id = TraceIdGenerator.span_id()
|
||||
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
|
||||
insert_traces(
|
||||
[
|
||||
Traces(
|
||||
timestamp=now - timedelta(seconds=4),
|
||||
duration=timedelta(seconds=3),
|
||||
trace_id=http_service_trace_id,
|
||||
span_id=http_service_span_id,
|
||||
parent_span_id="",
|
||||
name="POST /integration",
|
||||
kind=TracesKind.SPAN_KIND_SERVER,
|
||||
status_code=TracesStatusCode.STATUS_CODE_OK,
|
||||
status_message="",
|
||||
resources={
|
||||
"deployment.environment": "production",
|
||||
"service.name": "http-service",
|
||||
"os.type": "linux",
|
||||
"host.name": "linux-000",
|
||||
},
|
||||
attributes={
|
||||
"net.transport": "IP.TCP",
|
||||
"http.scheme": "http",
|
||||
"http.user_agent": "Integration Test",
|
||||
"http.request.method": "POST",
|
||||
"http.response.status_code": "200",
|
||||
},
|
||||
),
|
||||
Traces(
|
||||
timestamp=now - timedelta(seconds=3.5),
|
||||
duration=timedelta(seconds=0.5),
|
||||
trace_id=http_service_trace_id,
|
||||
span_id=http_service_db_span_id,
|
||||
parent_span_id=http_service_span_id,
|
||||
name="SELECT",
|
||||
kind=TracesKind.SPAN_KIND_CLIENT,
|
||||
status_code=TracesStatusCode.STATUS_CODE_OK,
|
||||
status_message="",
|
||||
resources={
|
||||
"deployment.environment": "production",
|
||||
"service.name": "http-service",
|
||||
"os.type": "linux",
|
||||
"host.name": "linux-000",
|
||||
},
|
||||
attributes={
|
||||
"db.name": "integration",
|
||||
"db.operation": "SELECT",
|
||||
"db.statement": "SELECT * FROM integration",
|
||||
},
|
||||
),
|
||||
Traces(
|
||||
timestamp=now - timedelta(seconds=1),
|
||||
duration=timedelta(seconds=2),
|
||||
trace_id=topic_service_trace_id,
|
||||
span_id=topic_service_span_id,
|
||||
parent_span_id="",
|
||||
name="topic publish",
|
||||
kind=TracesKind.SPAN_KIND_PRODUCER,
|
||||
status_code=TracesStatusCode.STATUS_CODE_OK,
|
||||
status_message="",
|
||||
resources={
|
||||
"deployment.environment": "production",
|
||||
"service.name": "topic-service",
|
||||
"os.type": "linux",
|
||||
"host.name": "linux-001",
|
||||
},
|
||||
attributes={
|
||||
"message.type": "SENT",
|
||||
"messaging.operation": "publish",
|
||||
"messaging.message.id": "001",
|
||||
},
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Calculate timestamps in nanoseconds
|
||||
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
|
||||
end_ns = int(now.timestamp() * 1e9)
|
||||
|
||||
params = {
|
||||
"start": start_ns,
|
||||
"end": end_ns,
|
||||
"source": "traces",
|
||||
"limit": 1000,
|
||||
}
|
||||
|
||||
# Export traces as CSV (GET for simple queries)
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(
|
||||
f"/api/v1/export_raw_data?{urlencode(params)}"
|
||||
),
|
||||
timeout=30,
|
||||
headers={
|
||||
"authorization": f"Bearer {token}",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.headers["Content-Type"] == "text/csv"
|
||||
assert "attachment" in response.headers.get("Content-Disposition", "")
|
||||
|
||||
# Parse CSV content
|
||||
csv_content = response.text
|
||||
csv_reader = csv.DictReader(io.StringIO(csv_content))
|
||||
|
||||
rows = list(csv_reader)
|
||||
assert len(rows) == 3, f"Expected 3 rows, got {len(rows)}"
|
||||
|
||||
# Verify trace IDs are present in the exported data
|
||||
trace_ids = [row.get("trace_id") for row in rows]
|
||||
assert http_service_trace_id in trace_ids
|
||||
assert topic_service_trace_id in trace_ids
|
||||
|
||||
# Verify span names are present
|
||||
span_names = [row.get("name") for row in rows]
|
||||
assert "POST /integration" in span_names
|
||||
assert "SELECT" in span_names
|
||||
assert "topic publish" in span_names
|
||||
|
||||
|
||||
def test_export_traces_jsonl(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
insert_traces: Callable[[List[Traces]], None],
|
||||
) -> None:
|
||||
"""
|
||||
Setup:
|
||||
Insert 2 traces with different attributes.
|
||||
|
||||
Tests:
|
||||
1. Export traces as JSONL format
|
||||
2. Verify JSONL structure and content
|
||||
3. Check each line is valid JSON
|
||||
4. Validate trace data is correctly formatted
|
||||
"""
|
||||
http_service_trace_id = TraceIdGenerator.trace_id()
|
||||
http_service_span_id = TraceIdGenerator.span_id()
|
||||
topic_service_trace_id = TraceIdGenerator.trace_id()
|
||||
topic_service_span_id = TraceIdGenerator.span_id()
|
||||
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
|
||||
insert_traces(
|
||||
[
|
||||
Traces(
|
||||
timestamp=now - timedelta(seconds=4),
|
||||
duration=timedelta(seconds=3),
|
||||
trace_id=http_service_trace_id,
|
||||
span_id=http_service_span_id,
|
||||
parent_span_id="",
|
||||
name="POST /api/test",
|
||||
kind=TracesKind.SPAN_KIND_SERVER,
|
||||
status_code=TracesStatusCode.STATUS_CODE_OK,
|
||||
status_message="",
|
||||
resources={
|
||||
"service.name": "api-service",
|
||||
"deployment.environment": "staging",
|
||||
},
|
||||
attributes={
|
||||
"http.request.method": "POST",
|
||||
"http.response.status_code": "201",
|
||||
},
|
||||
),
|
||||
Traces(
|
||||
timestamp=now - timedelta(seconds=2),
|
||||
duration=timedelta(seconds=1),
|
||||
trace_id=topic_service_trace_id,
|
||||
span_id=topic_service_span_id,
|
||||
parent_span_id="",
|
||||
name="queue.process",
|
||||
kind=TracesKind.SPAN_KIND_CONSUMER,
|
||||
status_code=TracesStatusCode.STATUS_CODE_OK,
|
||||
status_message="",
|
||||
resources={
|
||||
"service.name": "queue-service",
|
||||
"deployment.environment": "staging",
|
||||
},
|
||||
attributes={
|
||||
"messaging.operation": "process",
|
||||
"messaging.system": "rabbitmq",
|
||||
},
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Calculate timestamps in nanoseconds
|
||||
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
|
||||
end_ns = int(now.timestamp() * 1e9)
|
||||
|
||||
params = {
|
||||
"start": start_ns,
|
||||
"end": end_ns,
|
||||
"format": "jsonl",
|
||||
"source": "traces",
|
||||
"limit": 1000,
|
||||
}
|
||||
|
||||
# Export traces as JSONL (GET for simple queries)
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(
|
||||
f"/api/v1/export_raw_data?{urlencode(params)}"
|
||||
),
|
||||
timeout=10,
|
||||
headers={
|
||||
"authorization": f"Bearer {token}",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.headers["Content-Type"] == "application/x-ndjson"
|
||||
assert "attachment" in response.headers.get("Content-Disposition", "")
|
||||
|
||||
# Parse JSONL content
|
||||
jsonl_lines = response.text.strip().split("\n")
|
||||
assert len(jsonl_lines) == 2, f"Expected 2 lines, got {len(jsonl_lines)}"
|
||||
|
||||
# Verify each line is valid JSON
|
||||
json_objects = []
|
||||
for line in jsonl_lines:
|
||||
obj = json.loads(line)
|
||||
json_objects.append(obj)
|
||||
assert "trace_id" in obj
|
||||
assert "span_id" in obj
|
||||
assert "name" in obj
|
||||
|
||||
# Verify trace IDs are present
|
||||
trace_ids = [obj.get("trace_id") for obj in json_objects]
|
||||
assert http_service_trace_id in trace_ids
|
||||
assert topic_service_trace_id in trace_ids
|
||||
|
||||
# Verify span names are present
|
||||
span_names = [obj.get("name") for obj in json_objects]
|
||||
assert "POST /api/test" in span_names
|
||||
assert "queue.process" in span_names
|
||||
|
||||
|
||||
def test_export_traces_with_filter(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
insert_traces: Callable[[List[Traces]], None],
|
||||
) -> None:
|
||||
"""
|
||||
Setup:
|
||||
Insert traces with different service names.
|
||||
|
||||
Tests:
|
||||
1. Export traces with filter applied
|
||||
2. Verify only filtered traces are returned
|
||||
"""
|
||||
service_a_trace_id = TraceIdGenerator.trace_id()
|
||||
service_a_span_id = TraceIdGenerator.span_id()
|
||||
service_b_trace_id = TraceIdGenerator.trace_id()
|
||||
service_b_span_id = TraceIdGenerator.span_id()
|
||||
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
|
||||
insert_traces(
|
||||
[
|
||||
Traces(
|
||||
timestamp=now - timedelta(seconds=4),
|
||||
duration=timedelta(seconds=1),
|
||||
trace_id=service_a_trace_id,
|
||||
span_id=service_a_span_id,
|
||||
parent_span_id="",
|
||||
name="operation-a",
|
||||
kind=TracesKind.SPAN_KIND_SERVER,
|
||||
status_code=TracesStatusCode.STATUS_CODE_OK,
|
||||
status_message="",
|
||||
resources={
|
||||
"service.name": "service-a",
|
||||
},
|
||||
attributes={},
|
||||
),
|
||||
Traces(
|
||||
timestamp=now - timedelta(seconds=2),
|
||||
duration=timedelta(seconds=1),
|
||||
trace_id=service_b_trace_id,
|
||||
span_id=service_b_span_id,
|
||||
parent_span_id="",
|
||||
name="operation-b",
|
||||
kind=TracesKind.SPAN_KIND_SERVER,
|
||||
status_code=TracesStatusCode.STATUS_CODE_OK,
|
||||
status_message="",
|
||||
resources={
|
||||
"service.name": "service-b",
|
||||
},
|
||||
attributes={},
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Calculate timestamps in nanoseconds
|
||||
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
|
||||
end_ns = int(now.timestamp() * 1e9)
|
||||
|
||||
params = {
|
||||
"start": start_ns,
|
||||
"end": end_ns,
|
||||
"format": "jsonl",
|
||||
"source": "traces",
|
||||
"limit": 1000,
|
||||
"filter": "service.name = 'service-a'",
|
||||
}
|
||||
|
||||
# Export traces with filter (GET supports filter param)
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(
|
||||
f"/api/v1/export_raw_data?{urlencode(params)}"
|
||||
),
|
||||
timeout=10,
|
||||
headers={
|
||||
"authorization": f"Bearer {token}",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.headers["Content-Type"] == "application/x-ndjson"
|
||||
|
||||
# Parse JSONL content
|
||||
jsonl_lines = response.text.strip().split("\n")
|
||||
assert len(jsonl_lines) == 1, f"Expected 1 line (filtered), got {len(jsonl_lines)}"
|
||||
|
||||
# Verify the filtered trace
|
||||
filtered_obj = json.loads(jsonl_lines[0])
|
||||
assert filtered_obj["trace_id"] == service_a_trace_id
|
||||
assert filtered_obj["name"] == "operation-a"
|
||||
|
||||
|
||||
def test_export_traces_with_limit(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
insert_traces: Callable[[List[Traces]], None],
|
||||
) -> None:
|
||||
"""
|
||||
Setup:
|
||||
Insert 5 traces.
|
||||
|
||||
Tests:
|
||||
1. Export traces with limit applied
|
||||
2. Verify only limited number of traces are returned
|
||||
"""
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
|
||||
traces = []
|
||||
for i in range(5):
|
||||
traces.append(
|
||||
Traces(
|
||||
timestamp=now - timedelta(seconds=i),
|
||||
duration=timedelta(seconds=1),
|
||||
trace_id=TraceIdGenerator.trace_id(),
|
||||
span_id=TraceIdGenerator.span_id(),
|
||||
parent_span_id="",
|
||||
name=f"operation-{i}",
|
||||
kind=TracesKind.SPAN_KIND_SERVER,
|
||||
status_code=TracesStatusCode.STATUS_CODE_OK,
|
||||
status_message="",
|
||||
resources={
|
||||
"service.name": "test-service",
|
||||
},
|
||||
attributes={},
|
||||
)
|
||||
)
|
||||
|
||||
insert_traces(traces)
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Calculate timestamps in nanoseconds
|
||||
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
|
||||
end_ns = int(now.timestamp() * 1e9)
|
||||
|
||||
params = {
|
||||
"start": start_ns,
|
||||
"end": end_ns,
|
||||
"format": "csv",
|
||||
"source": "traces",
|
||||
"limit": 3,
|
||||
}
|
||||
|
||||
# Export traces with limit (GET supports limit param)
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(
|
||||
f"/api/v1/export_raw_data?{urlencode(params)}"
|
||||
),
|
||||
timeout=10,
|
||||
headers={
|
||||
"authorization": f"Bearer {token}",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.headers["Content-Type"] == "text/csv"
|
||||
|
||||
# Parse CSV content
|
||||
csv_content = response.text
|
||||
csv_reader = csv.DictReader(io.StringIO(csv_content))
|
||||
|
||||
rows = list(csv_reader)
|
||||
assert len(rows) == 3, f"Expected 3 rows (limited), got {len(rows)}"
|
||||
|
||||
|
||||
def test_export_traces_multiple_queries_rejected(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Tests:
|
||||
1. POST with multiple builder queries but no trace operator is rejected
|
||||
2. Verify 400 error is returned
|
||||
"""
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
|
||||
end_ns = int(now.timestamp() * 1e9)
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
body = {
|
||||
"start": start_ns,
|
||||
"end": end_ns,
|
||||
"compositeQuery": {
|
||||
"queries": [
|
||||
{
|
||||
"type": "builder_query",
|
||||
"spec": {
|
||||
"signal": "traces",
|
||||
"name": "A",
|
||||
"limit": 1000,
|
||||
"filter": {"expression": "service.name = 'service-a'"},
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "builder_query",
|
||||
"spec": {
|
||||
"signal": "traces",
|
||||
"name": "B",
|
||||
"limit": 1000,
|
||||
"filter": {"expression": "service.name = 'service-b'"},
|
||||
},
|
||||
},
|
||||
]
|
||||
},
|
||||
}
|
||||
|
||||
url = signoz.self.host_configs["8080"].get("/api/v1/export_raw_data?format=jsonl")
|
||||
response = requests.post(
|
||||
url,
|
||||
json=body,
|
||||
timeout=10,
|
||||
headers={
|
||||
"authorization": f"Bearer {token}",
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.BAD_REQUEST
|
||||
|
||||
|
||||
def test_export_traces_with_composite_query_trace_operator(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
    """
    Setup:
    Insert multiple traces with parent-child relationships.

    Tests:
    1. Export traces using trace operator in composite query (POST)
    2. Verify trace operator query works correctly
    """
    parent_trace_id = TraceIdGenerator.trace_id()
    parent_span_id = TraceIdGenerator.span_id()
    child_span_id_1 = TraceIdGenerator.span_id()
    child_span_id_2 = TraceIdGenerator.span_id()

    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)

    insert_traces(
        [
            Traces(
                timestamp=now - timedelta(seconds=10),
                duration=timedelta(seconds=5),
                trace_id=parent_trace_id,
                span_id=parent_span_id,
                parent_span_id="",
                name="parent-operation",
                kind=TracesKind.SPAN_KIND_SERVER,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "service.name": "parent-service",
                },
                attributes={
                    "operation.type": "parent",
                },
            ),
            Traces(
                timestamp=now - timedelta(seconds=9),
                duration=timedelta(seconds=2),
                trace_id=parent_trace_id,
                span_id=child_span_id_1,
                parent_span_id=parent_span_id,
                name="child-operation-1",
                kind=TracesKind.SPAN_KIND_INTERNAL,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "service.name": "parent-service",
                },
                attributes={
                    "operation.type": "child",
                },
            ),
            Traces(
                timestamp=now - timedelta(seconds=7),
                duration=timedelta(seconds=1),
                trace_id=parent_trace_id,
                span_id=child_span_id_2,
                parent_span_id=parent_span_id,
                name="child-operation-2",
                kind=TracesKind.SPAN_KIND_INTERNAL,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "service.name": "parent-service",
                },
                attributes={
                    "operation.type": "child",
                },
            ),
        ]
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Calculate timestamps in nanoseconds
    start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
    end_ns = int(now.timestamp() * 1e9)

    # A: spans with operation.type = 'parent'
    query_a = {
        "type": "builder_query",
        "spec": {
            "signal": "traces",
            "name": "A",
            "limit": 1000,
            "filter": {"expression": "operation.type = 'parent'"},
        },
    }

    # B: spans with operation.type = 'child'
    query_b = {
        "type": "builder_query",
        "spec": {
            "signal": "traces",
            "name": "B",
            "limit": 1000,
            "filter": {"expression": "operation.type = 'child'"},
        },
    }

    # Trace operator: find traces where A has a direct descendant B
    query_c = {
        "type": "builder_trace_operator",
        "spec": {
            "name": "C",
            "expression": "A => B",
            "returnSpansFrom": "A",
            "limit": 1000,
            "order": [{"key": {"name": "timestamp"}, "direction": "desc"}],
        },
    }

    body = {
        "start": start_ns,
        "end": end_ns,
        "requestType": "raw",
        "compositeQuery": {
            "queries": [query_a, query_b, query_c],
        },
    }

    url = signoz.self.host_configs["8080"].get("/api/v1/export_raw_data?format=jsonl")
    response = requests.post(
        url,
        json=body,
        timeout=10,
        headers={
            "authorization": f"Bearer {token}",
            "Content-Type": "application/json",
        },
    )

    print(response.text)
    assert response.status_code == HTTPStatus.OK
    assert response.headers["Content-Type"] == "application/x-ndjson"

    # Parse JSONL content: only spans from "A" (the parent span) should be returned
    jsonl_lines = response.text.strip().split("\n")
    assert len(jsonl_lines) == 1, f"Expected 1 line (spans from 'A' only), got {len(jsonl_lines)}"

    # Verify all returned spans belong to the matched trace
    json_objects = [json.loads(line) for line in jsonl_lines]
    trace_ids = [obj.get("trace_id") for obj in json_objects]
    assert all(tid == parent_trace_id for tid in trace_ids)

    # Verify the parent span (returnSpansFrom = "A") is present
    span_names = [obj.get("name") for obj in json_objects]
    assert "parent-operation" in span_names

def test_export_traces_with_select_fields(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
    """
    Setup:
    Insert traces with various attributes.

    Tests:
    1. Export traces with specific select fields via POST
    2. Verify only specified fields are returned in the output
    """
    trace_id = TraceIdGenerator.trace_id()
    span_id = TraceIdGenerator.span_id()

    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)

    insert_traces(
        [
            Traces(
                timestamp=now - timedelta(seconds=10),
                duration=timedelta(seconds=2),
                trace_id=trace_id,
                span_id=span_id,
                parent_span_id="",
                name="test-operation",
                kind=TracesKind.SPAN_KIND_SERVER,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "service.name": "test-service",
                    "deployment.environment": "production",
                    "host.name": "server-01",
                },
                attributes={
                    "http.method": "POST",
                    "http.status_code": "201",
                    "user.id": "user123",
                },
            ),
        ]
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Calculate timestamps in nanoseconds
    start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
    end_ns = int(now.timestamp() * 1e9)

    body = {
        "start": start_ns,
        "end": end_ns,
        "requestType": "raw",
        "compositeQuery": {
            "queries": [
                {
                    "type": "builder_query",
                    "spec": {
                        "signal": "traces",
                        "name": "A",
                        "limit": 1000,
                        "selectFields": [
                            {
                                "name": "trace_id",
                                "fieldDataType": "string",
                                "fieldContext": "span",
                                "signal": "traces",
                            },
                            {
                                "name": "span_id",
                                "fieldDataType": "string",
                                "fieldContext": "span",
                                "signal": "traces",
                            },
                            {
                                "name": "name",
                                "fieldDataType": "string",
                                "fieldContext": "span",
                                "signal": "traces",
                            },
                            {
                                "name": "service.name",
                                "fieldDataType": "string",
                                "fieldContext": "resource",
                                "signal": "traces",
                            },
                        ],
                    },
                }
            ]
        },
    }

    url = signoz.self.host_configs["8080"].get("/api/v1/export_raw_data?format=jsonl")
    response = requests.post(
        url,
        json=body,
        timeout=10,
        headers={
            "authorization": f"Bearer {token}",
            "Content-Type": "application/json",
        },
    )

    assert response.status_code == HTTPStatus.OK
    assert response.headers["Content-Type"] == "application/x-ndjson"

    # Parse JSONL content
    jsonl_lines = response.text.strip().split("\n")
    assert len(jsonl_lines) == 1

    # Verify the selected fields are present
    result = json.loads(jsonl_lines[0])
    assert "trace_id" in result
    assert "span_id" in result
    assert "name" in result

    # Verify values
    assert result["trace_id"] == trace_id
    assert result["span_id"] == span_id
    assert result["name"] == "test-operation"

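# A minimal, hypothetical sketch (not used above) of the JSONL decoding that the
# tests in this file perform inline on the application/x-ndjson export response;
# `parse_jsonl_export` is a name introduced only for illustration, and it relies
# on the same `json` and `List` imports the tests above already use.
def parse_jsonl_export(text: str) -> List[dict]:
    """Split a non-empty JSONL payload into one decoded dict per line."""
    return [json.loads(line) for line in text.strip().split("\n") if line]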