mirror of
https://github.com/SigNoz/signoz.git
synced 2026-02-20 15:52:41 +00:00
Compare commits
17 Commits
ns/ip-filt
...
platform-p
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
df0c17a006 | ||
|
|
57138c0bac | ||
|
|
678f015e0b | ||
|
|
aa03581be1 | ||
|
|
4c40a36bea | ||
|
|
5a69f16410 | ||
|
|
07afef5c5e | ||
|
|
dcae722b53 | ||
|
|
92b07d15ea | ||
|
|
a0dad1602e | ||
|
|
5cf5b70aca | ||
|
|
db51b23e3d | ||
|
|
80c46b3414 | ||
|
|
2b929421a1 | ||
|
|
2792e20aa2 | ||
|
|
473be1b174 | ||
|
|
6d0c13f9a7 |
4
.github/workflows/integrationci.yaml
vendored
4
.github/workflows/integrationci.yaml
vendored
@@ -53,9 +53,9 @@ jobs:
|
||||
- sqlite
|
||||
clickhouse-version:
|
||||
- 25.5.6
|
||||
- 25.10.1
|
||||
- 25.10.5
|
||||
schema-migrator-version:
|
||||
- v0.129.7
|
||||
- v0.142.0
|
||||
postgres-version:
|
||||
- 15
|
||||
if: |
|
||||
|
||||
@@ -85,6 +85,9 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
|
||||
func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
|
||||
return noopgateway.NewProviderFactory()
|
||||
},
|
||||
func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
|
||||
return querier.NewHandler(ps, q, a)
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
|
||||
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
|
||||
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
|
||||
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
|
||||
eequerier "github.com/SigNoz/signoz/ee/querier"
|
||||
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
|
||||
"github.com/SigNoz/signoz/ee/gateway/httpgateway"
|
||||
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
|
||||
@@ -124,6 +125,10 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
|
||||
func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
|
||||
return httpgateway.NewProviderFactory(licensing)
|
||||
},
|
||||
func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
|
||||
communityHandler := querier.NewHandler(ps, q, a)
|
||||
return eequerier.NewHandler(ps, q, communityHandler)
|
||||
},
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
|
||||
@@ -92,6 +92,15 @@ components:
|
||||
tokenType:
|
||||
type: string
|
||||
type: object
|
||||
AuthtypesGettableTransaction:
|
||||
properties:
|
||||
authorized:
|
||||
type: boolean
|
||||
object:
|
||||
$ref: '#/components/schemas/AuthtypesObject'
|
||||
relation:
|
||||
type: string
|
||||
type: object
|
||||
AuthtypesGoogleConfig:
|
||||
properties:
|
||||
allowedGroups:
|
||||
@@ -117,6 +126,8 @@ components:
|
||||
serviceAccountJson:
|
||||
type: string
|
||||
type: object
|
||||
AuthtypesName:
|
||||
type: object
|
||||
AuthtypesOIDCConfig:
|
||||
properties:
|
||||
claimMapping:
|
||||
@@ -134,6 +145,16 @@ components:
|
||||
issuerAlias:
|
||||
type: string
|
||||
type: object
|
||||
AuthtypesObject:
|
||||
properties:
|
||||
resource:
|
||||
$ref: '#/components/schemas/AuthtypesResource'
|
||||
selector:
|
||||
$ref: '#/components/schemas/AuthtypesSelector'
|
||||
required:
|
||||
- resource
|
||||
- selector
|
||||
type: object
|
||||
AuthtypesOrgSessionContext:
|
||||
properties:
|
||||
authNSupport:
|
||||
@@ -171,6 +192,16 @@ components:
|
||||
refreshToken:
|
||||
type: string
|
||||
type: object
|
||||
AuthtypesResource:
|
||||
properties:
|
||||
name:
|
||||
$ref: '#/components/schemas/AuthtypesName'
|
||||
type:
|
||||
type: string
|
||||
required:
|
||||
- name
|
||||
- type
|
||||
type: object
|
||||
AuthtypesRoleMapping:
|
||||
properties:
|
||||
defaultRole:
|
||||
@@ -196,6 +227,8 @@ components:
|
||||
samlIdp:
|
||||
type: string
|
||||
type: object
|
||||
AuthtypesSelector:
|
||||
type: object
|
||||
AuthtypesSessionContext:
|
||||
properties:
|
||||
exists:
|
||||
@@ -206,6 +239,18 @@ components:
|
||||
nullable: true
|
||||
type: array
|
||||
type: object
|
||||
AuthtypesTransaction:
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
object:
|
||||
$ref: '#/components/schemas/AuthtypesObject'
|
||||
relation:
|
||||
type: string
|
||||
required:
|
||||
- relation
|
||||
- object
|
||||
type: object
|
||||
AuthtypesUpdateableAuthDomain:
|
||||
properties:
|
||||
config:
|
||||
@@ -307,11 +352,16 @@ components:
|
||||
type: string
|
||||
value:
|
||||
type: string
|
||||
required:
|
||||
- id
|
||||
- value
|
||||
type: object
|
||||
GatewaytypesGettableCreatedIngestionKeyLimit:
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
required:
|
||||
- id
|
||||
type: object
|
||||
GatewaytypesGettableIngestionKeys:
|
||||
properties:
|
||||
@@ -432,6 +482,8 @@ components:
|
||||
type: string
|
||||
nullable: true
|
||||
type: array
|
||||
required:
|
||||
- name
|
||||
type: object
|
||||
GatewaytypesPostableIngestionKeyLimit:
|
||||
properties:
|
||||
@@ -454,6 +506,8 @@ components:
|
||||
type: string
|
||||
nullable: true
|
||||
type: array
|
||||
required:
|
||||
- config
|
||||
type: object
|
||||
MetricsexplorertypesListMetric:
|
||||
properties:
|
||||
@@ -1604,6 +1658,52 @@ components:
|
||||
status:
|
||||
type: string
|
||||
type: object
|
||||
RoletypesGettableResources:
|
||||
properties:
|
||||
relations:
|
||||
additionalProperties:
|
||||
items:
|
||||
type: string
|
||||
type: array
|
||||
nullable: true
|
||||
type: object
|
||||
resources:
|
||||
items:
|
||||
$ref: '#/components/schemas/AuthtypesResource'
|
||||
nullable: true
|
||||
type: array
|
||||
type: object
|
||||
RoletypesPatchableObjects:
|
||||
properties:
|
||||
additions:
|
||||
items:
|
||||
$ref: '#/components/schemas/AuthtypesObject'
|
||||
nullable: true
|
||||
type: array
|
||||
deletions:
|
||||
items:
|
||||
$ref: '#/components/schemas/AuthtypesObject'
|
||||
nullable: true
|
||||
type: array
|
||||
required:
|
||||
- additions
|
||||
- deletions
|
||||
type: object
|
||||
RoletypesPatchableRole:
|
||||
properties:
|
||||
description:
|
||||
nullable: true
|
||||
type: string
|
||||
type: object
|
||||
RoletypesPostableRole:
|
||||
properties:
|
||||
description:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
required:
|
||||
- name
|
||||
type: object
|
||||
RoletypesRole:
|
||||
properties:
|
||||
createdAt:
|
||||
@@ -1920,6 +2020,82 @@ components:
|
||||
format: date-time
|
||||
type: string
|
||||
type: object
|
||||
ZeustypesGettableHost:
|
||||
properties:
|
||||
hosts:
|
||||
items:
|
||||
$ref: '#/components/schemas/ZeustypesHost'
|
||||
nullable: true
|
||||
type: array
|
||||
name:
|
||||
type: string
|
||||
state:
|
||||
type: string
|
||||
tier:
|
||||
type: string
|
||||
required:
|
||||
- name
|
||||
- state
|
||||
- tier
|
||||
- hosts
|
||||
type: object
|
||||
ZeustypesHost:
|
||||
properties:
|
||||
is_default:
|
||||
type: boolean
|
||||
name:
|
||||
type: string
|
||||
url:
|
||||
type: string
|
||||
required:
|
||||
- name
|
||||
- is_default
|
||||
- url
|
||||
type: object
|
||||
ZeustypesPostableHost:
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
required:
|
||||
- name
|
||||
type: object
|
||||
ZeustypesPostableProfile:
|
||||
properties:
|
||||
existing_observability_tool:
|
||||
type: string
|
||||
has_existing_observability_tool:
|
||||
type: boolean
|
||||
logs_scale_per_day_in_gb:
|
||||
format: int64
|
||||
type: integer
|
||||
number_of_hosts:
|
||||
format: int64
|
||||
type: integer
|
||||
number_of_services:
|
||||
format: int64
|
||||
type: integer
|
||||
reasons_for_interest_in_signoz:
|
||||
items:
|
||||
type: string
|
||||
nullable: true
|
||||
type: array
|
||||
timeline_for_migrating_to_signoz:
|
||||
type: string
|
||||
uses_otel:
|
||||
type: boolean
|
||||
where_did_you_discover_signoz:
|
||||
type: string
|
||||
required:
|
||||
- uses_otel
|
||||
- has_existing_observability_tool
|
||||
- existing_observability_tool
|
||||
- reasons_for_interest_in_signoz
|
||||
- logs_scale_per_day_in_gb
|
||||
- number_of_services
|
||||
- number_of_hosts
|
||||
- where_did_you_discover_signoz
|
||||
- timeline_for_migrating_to_signoz
|
||||
type: object
|
||||
securitySchemes:
|
||||
api_key:
|
||||
description: API Keys
|
||||
@@ -1937,6 +2113,41 @@ info:
|
||||
version: ""
|
||||
openapi: 3.0.3
|
||||
paths:
|
||||
/api/v1/authz/check:
|
||||
post:
|
||||
deprecated: false
|
||||
description: Checks if the authenticated user has permissions for given transactions
|
||||
operationId: AuthzCheck
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
items:
|
||||
$ref: '#/components/schemas/AuthtypesTransaction'
|
||||
type: array
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
items:
|
||||
$ref: '#/components/schemas/AuthtypesGettableTransaction'
|
||||
type: array
|
||||
status:
|
||||
type: string
|
||||
type: object
|
||||
description: OK
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
summary: Check permissions
|
||||
tags:
|
||||
- authz
|
||||
/api/v1/changePassword/{id}:
|
||||
post:
|
||||
deprecated: false
|
||||
@@ -3138,12 +3349,29 @@ paths:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- VIEWER
|
||||
- tokenizer:
|
||||
- VIEWER
|
||||
summary: Promote and index paths
|
||||
tags:
|
||||
- logs
|
||||
@@ -3168,12 +3396,29 @@ paths:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- EDITOR
|
||||
- tokenizer:
|
||||
- EDITOR
|
||||
summary: Promote and index paths
|
||||
tags:
|
||||
- logs
|
||||
@@ -3732,6 +3977,11 @@ paths:
|
||||
deprecated: false
|
||||
description: This endpoint creates a role
|
||||
operationId: CreateRole
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RoletypesPostableRole'
|
||||
responses:
|
||||
"201":
|
||||
content:
|
||||
@@ -3744,6 +3994,12 @@ paths:
|
||||
type: string
|
||||
type: object
|
||||
description: Created
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
@@ -3756,12 +4012,30 @@ paths:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"409":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Conflict
|
||||
"451":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unavailable For Legal Reasons
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
"501":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Implemented
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
@@ -3800,12 +4074,30 @@ paths:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"451":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unavailable For Legal Reasons
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
"501":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Implemented
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
@@ -3872,6 +4164,11 @@ paths:
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RoletypesPatchableRole'
|
||||
responses:
|
||||
"204":
|
||||
content:
|
||||
@@ -3891,6 +4188,220 @@ paths:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"451":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unavailable For Legal Reasons
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
"501":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Implemented
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Patch role
|
||||
tags:
|
||||
- role
|
||||
/api/v1/roles/{id}/relation/{relation}/objects:
|
||||
get:
|
||||
deprecated: false
|
||||
description: Gets all objects connected to the specified role via a given relation
|
||||
type
|
||||
operationId: GetObjects
|
||||
parameters:
|
||||
- in: path
|
||||
name: id
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- in: path
|
||||
name: relation
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
items:
|
||||
$ref: '#/components/schemas/AuthtypesObject'
|
||||
type: array
|
||||
status:
|
||||
type: string
|
||||
type: object
|
||||
description: OK
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"451":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unavailable For Legal Reasons
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
"501":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Implemented
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Get objects for a role by relation
|
||||
tags:
|
||||
- role
|
||||
patch:
|
||||
deprecated: false
|
||||
description: Patches the objects connected to the specified role via a given
|
||||
relation type
|
||||
operationId: PatchObjects
|
||||
parameters:
|
||||
- in: path
|
||||
name: id
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- in: path
|
||||
name: relation
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RoletypesPatchableObjects'
|
||||
responses:
|
||||
"204":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
description: No Content
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"451":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unavailable For Legal Reasons
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
"501":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Implemented
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Patch objects for a role by relation
|
||||
tags:
|
||||
- role
|
||||
/api/v1/roles/resources:
|
||||
get:
|
||||
deprecated: false
|
||||
description: Gets all the available resources for role assignment
|
||||
operationId: GetResources
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
$ref: '#/components/schemas/RoletypesGettableResources'
|
||||
status:
|
||||
type: string
|
||||
type: object
|
||||
description: OK
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
@@ -3902,7 +4413,7 @@ paths:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Patch role
|
||||
summary: Get resources
|
||||
tags:
|
||||
- role
|
||||
/api/v1/user:
|
||||
@@ -4720,6 +5231,7 @@ paths:
|
||||
parameters:
|
||||
- in: query
|
||||
name: name
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- in: query
|
||||
@@ -5538,6 +6050,174 @@ paths:
|
||||
summary: Rotate session
|
||||
tags:
|
||||
- sessions
|
||||
/api/v2/zeus/hosts:
|
||||
get:
|
||||
deprecated: false
|
||||
description: This endpoint gets the host info from zeus.
|
||||
operationId: GetHosts
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
$ref: '#/components/schemas/ZeustypesGettableHost'
|
||||
status:
|
||||
type: string
|
||||
type: object
|
||||
description: OK
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Get host info from Zeus.
|
||||
tags:
|
||||
- zeus
|
||||
put:
|
||||
deprecated: false
|
||||
description: This endpoint saves the host of a deployment to zeus.
|
||||
operationId: PutHost
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ZeustypesPostableHost'
|
||||
responses:
|
||||
"204":
|
||||
description: No Content
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"409":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Conflict
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Put host in Zeus for a deployment.
|
||||
tags:
|
||||
- zeus
|
||||
/api/v2/zeus/profiles:
|
||||
put:
|
||||
deprecated: false
|
||||
description: This endpoint saves the profile of a deployment to zeus.
|
||||
operationId: PutProfile
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ZeustypesPostableProfile'
|
||||
responses:
|
||||
"204":
|
||||
description: No Content
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"409":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Conflict
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Put profile in Zeus for a deployment.
|
||||
tags:
|
||||
- zeus
|
||||
/api/v5/query_range:
|
||||
post:
|
||||
deprecated: false
|
||||
@@ -5932,4 +6612,58 @@ paths:
|
||||
- VIEWER
|
||||
summary: Query range
|
||||
tags:
|
||||
- query
|
||||
- querier
|
||||
/api/v5/substitute_vars:
|
||||
post:
|
||||
deprecated: false
|
||||
description: Replace variables in a query
|
||||
operationId: ReplaceVariables
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
$ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
|
||||
status:
|
||||
type: string
|
||||
type: object
|
||||
description: OK
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- VIEWER
|
||||
- tokenizer:
|
||||
- VIEWER
|
||||
summary: Replace variables
|
||||
tags:
|
||||
- querier
|
||||
|
||||
@@ -155,6 +155,7 @@ The `handler.New` function ties the HTTP handler to OpenAPI metadata via `OpenAP
|
||||
- **Request / RequestContentType**:
|
||||
- `Request` is a Go type that describes the request body or form.
|
||||
- `RequestContentType` is usually `"application/json"` or `"application/x-www-form-urlencoded"` (for callbacks like SAML).
|
||||
- **RequestExamples**: An array of `handler.OpenAPIExample` that provide concrete request payloads in the generated spec. See [Adding request examples](#adding-request-examples) below.
|
||||
- **Response / ResponseContentType**:
|
||||
- `Response` is the Go type for the successful response payload.
|
||||
- `ResponseContentType` is usually `"application/json"`; use `""` for responses without a body.
|
||||
@@ -172,8 +173,170 @@ See existing examples in:
|
||||
- `addUserRoutes` (for typical JSON request/response)
|
||||
- `addSessionRoutes` (for form-encoded and redirect flows)
|
||||
|
||||
## OpenAPI schema details for request/response types
|
||||
|
||||
The OpenAPI spec is generated from the Go types you pass as `Request` and `Response` in `OpenAPIDef`. The following struct tags and interfaces control how those types appear in the generated schema.
|
||||
|
||||
### Adding request examples
|
||||
|
||||
Use the `RequestExamples` field in `OpenAPIDef` to provide concrete request payloads. Each example is a `handler.OpenAPIExample`:
|
||||
|
||||
```go
|
||||
type OpenAPIExample struct {
|
||||
Name string // unique key for the example (e.g. "traces_time_series")
|
||||
Summary string // short description shown in docs (e.g. "Time series: count spans grouped by service")
|
||||
Description string // optional longer description
|
||||
Value any // the example payload, typically map[string]any
|
||||
}
|
||||
```
|
||||
|
||||
For reference, see `pkg/apiserver/signozapiserver/querier.go` which defines examples inline for the `/api/v5/query_range` endpoint:
|
||||
|
||||
```go
|
||||
if err := router.Handle("/api/v5/query_range", handler.New(provider.authZ.ViewAccess(provider.querierHandler.QueryRange), handler.OpenAPIDef{
|
||||
ID: "QueryRangeV5",
|
||||
Tags: []string{"querier"},
|
||||
Summary: "Query range",
|
||||
Description: "Execute a composite query over a time range.",
|
||||
Request: new(qbtypes.QueryRangeRequest),
|
||||
RequestContentType: "application/json",
|
||||
RequestExamples: []handler.OpenAPIExample{
|
||||
{
|
||||
Name: "traces_time_series",
|
||||
Summary: "Time series: count spans grouped by service",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "traces",
|
||||
// ...
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
// ... more examples
|
||||
},
|
||||
// ...
|
||||
})).Methods(http.MethodPost).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
```
|
||||
|
||||
### `required` tag
|
||||
|
||||
Use `required:"true"` on struct fields where the property is expected to be **present** in the JSON payload. This is different from the zero value, a field can have its zero value (e.g. `0`, `""`, `false`) and still be required. The `required` tag means the key itself must exist in the JSON object.
|
||||
|
||||
```go
|
||||
type ListItem struct {
|
||||
...
|
||||
}
|
||||
|
||||
type ListResponse struct {
|
||||
List []ListItem `json:"list" required:"true" nullable:"true"`
|
||||
Total uint64 `json:"total" required:"true"`
|
||||
}
|
||||
```
|
||||
|
||||
In this example, a response like `{"list": null, "total": 0}` is valid. Both keys are present (satisfying `required`), `total` has its zero value, and `list` is null (allowed by `nullable`). But `{"total": 0}` would violate the schema because the `list` key is missing.
|
||||
|
||||
### `nullable` tag
|
||||
|
||||
Use `nullable:"true"` on struct fields that can be `null` in the JSON payload. This is especially important for **slice and map fields** because in Go, the zero value for these types is `nil`, which serializes to `null` in JSON (not `[]` or `{}`).
|
||||
|
||||
Be explicit about the distinction:
|
||||
|
||||
- **Nullable list** (`nullable:"true"`): the field can be `null`. Use this when the Go code may return `nil` for the slice.
|
||||
- **Non-nullable list** (no `nullable` tag): the field is always an array, never `null`. Ensure the Go code initializes it to an empty slice (e.g. `make([]T, 0)`) before serializing.
|
||||
|
||||
```go
|
||||
// Non-nullable: Go code must ensure this is always an initialized slice.
|
||||
type NonNullableExample struct {
|
||||
Items []Item `json:"items" required:"true"`
|
||||
}
|
||||
```
|
||||
|
||||
When defining your types, ask yourself: "Can this field be `null` in the JSON response, or is it always an array/object?" If the Go code ever returns a `nil` slice or map, mark it `nullable:"true"`.
|
||||
|
||||
### `Enum()` method
|
||||
|
||||
For types that have a fixed set of acceptable values, implement the `Enum() []any` method. This generates an `enum` constraint in the JSON schema so the OpenAPI spec accurately restricts the values.
|
||||
|
||||
```go
|
||||
type Signal struct {
|
||||
valuer.String
|
||||
}
|
||||
|
||||
var (
|
||||
SignalTraces = Signal{valuer.NewString("traces")}
|
||||
SignalLogs = Signal{valuer.NewString("logs")}
|
||||
SignalMetrics = Signal{valuer.NewString("metrics")}
|
||||
)
|
||||
|
||||
func (Signal) Enum() []any {
|
||||
return []any{
|
||||
SignalTraces,
|
||||
SignalLogs,
|
||||
SignalMetrics,
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
This produces the following in the generated OpenAPI spec:
|
||||
|
||||
```yaml
|
||||
Signal:
|
||||
enum:
|
||||
- traces
|
||||
- logs
|
||||
- metrics
|
||||
type: string
|
||||
```
|
||||
|
||||
Every type with a known set of values **must** implement `Enum()`. Without it, the JSON schema will only show the base type (e.g. `string`) with no value constraints.
|
||||
|
||||
### `JSONSchema()` method (custom schema)
|
||||
|
||||
For types that need a completely custom JSON schema (for example, a field that accepts either a string or a number), implement the `jsonschema.Exposer` interface:
|
||||
|
||||
```go
|
||||
var _ jsonschema.Exposer = Step{}
|
||||
|
||||
func (Step) JSONSchema() (jsonschema.Schema, error) {
|
||||
s := jsonschema.Schema{}
|
||||
s.WithDescription("Step interval. Accepts a duration string or seconds.")
|
||||
|
||||
strSchema := jsonschema.Schema{}
|
||||
strSchema.WithType(jsonschema.String.Type())
|
||||
strSchema.WithExamples("60s", "5m", "1h")
|
||||
|
||||
numSchema := jsonschema.Schema{}
|
||||
numSchema.WithType(jsonschema.Number.Type())
|
||||
numSchema.WithExamples(60, 300, 3600)
|
||||
|
||||
s.OneOf = []jsonschema.SchemaOrBool{
|
||||
strSchema.ToSchemaOrBool(),
|
||||
numSchema.ToSchemaOrBool(),
|
||||
}
|
||||
return s, nil
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## What should I remember?
|
||||
|
||||
- **Keep handlers thin**: focus on HTTP concerns and delegate logic to modules/services.
|
||||
- **Always register routes through `signozapiserver`** using `handler.New` and a complete `OpenAPIDef`.
|
||||
- **Choose accurate request/response types** from the `types` packages so OpenAPI schemas are correct.
|
||||
- **Add `required:"true"`** on fields where the key must be present in the JSON (this is about key presence, not about the zero value).
|
||||
- **Add `nullable:"true"`** on fields that can be `null`. Pay special attention to slices and maps -- in Go these default to `nil` which serializes to `null`. If the field should always be an array, initialize it and do not mark it nullable.
|
||||
- **Implement `Enum()`** on every type that has a fixed set of acceptable values so the JSON schema generates proper `enum` constraints.
|
||||
- **Add request examples** via `RequestExamples` in `OpenAPIDef` for any non-trivial endpoint. See `pkg/apiserver/signozapiserver/querier.go` for reference.
|
||||
|
||||
@@ -62,10 +62,6 @@ func (provider *provider) Stop(ctx context.Context) error {
|
||||
return provider.openfgaServer.Stop(ctx)
|
||||
}
|
||||
|
||||
func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
|
||||
return provider.openfgaServer.Check(ctx, tuple)
|
||||
}
|
||||
|
||||
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
|
||||
return provider.openfgaServer.CheckWithTupleCreation(ctx, claims, orgID, relation, typeable, selectors, roleSelectors)
|
||||
}
|
||||
@@ -74,8 +70,8 @@ func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Contex
|
||||
return provider.openfgaServer.CheckWithTupleCreationWithoutClaims(ctx, orgID, relation, typeable, selectors, roleSelectors)
|
||||
}
|
||||
|
||||
func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
|
||||
return provider.openfgaServer.BatchCheck(ctx, tuples)
|
||||
func (provider *provider) BatchCheck(ctx context.Context, tupleReq map[string]*openfgav1.TupleKey) (map[string]*authtypes.TupleKeyAuthorization, error) {
|
||||
return provider.openfgaServer.BatchCheck(ctx, tupleReq)
|
||||
}
|
||||
|
||||
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
|
||||
@@ -187,6 +183,11 @@ func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource
|
||||
}
|
||||
|
||||
func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
|
||||
_, err := provider.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
storableRole, err := provider.store.Get(ctx, orgID, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -198,7 +199,7 @@ func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id
|
||||
resourceObjects, err := provider.
|
||||
ListObjects(
|
||||
ctx,
|
||||
authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
|
||||
authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.Name, orgID, &authtypes.RelationAssignee),
|
||||
relation,
|
||||
authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
|
||||
)
|
||||
|
||||
@@ -2,8 +2,10 @@ package openfgaserver
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strconv"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
openfgav1 "github.com/openfga/api/proto/openfga/v1"
|
||||
@@ -28,27 +30,34 @@ func (server *Server) Stop(ctx context.Context) error {
|
||||
return server.pkgAuthzService.Stop(ctx)
|
||||
}
|
||||
|
||||
func (server *Server) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
|
||||
return server.pkgAuthzService.Check(ctx, tuple)
|
||||
}
|
||||
|
||||
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
|
||||
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
|
||||
tupleSlice, err := typeable.Tuples(subject, relation, selectors, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = server.BatchCheck(ctx, tuples)
|
||||
tuples := make(map[string]*openfgav1.TupleKey, len(tupleSlice))
|
||||
for idx, tuple := range tupleSlice {
|
||||
tuples[strconv.Itoa(idx)] = tuple
|
||||
}
|
||||
|
||||
response, err := server.BatchCheck(ctx, tuples)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
for _, resp := range response {
|
||||
if resp.Authorized {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
|
||||
}
|
||||
|
||||
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
|
||||
@@ -57,21 +66,32 @@ func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, o
|
||||
return err
|
||||
}
|
||||
|
||||
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
|
||||
tupleSlice, err := typeable.Tuples(subject, relation, selectors, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = server.BatchCheck(ctx, tuples)
|
||||
tuples := make(map[string]*openfgav1.TupleKey, len(tupleSlice))
|
||||
for idx, tuple := range tupleSlice {
|
||||
tuples[strconv.Itoa(idx)] = tuple
|
||||
}
|
||||
|
||||
response, err := server.BatchCheck(ctx, tuples)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
for _, resp := range response {
|
||||
if resp.Authorized {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
|
||||
}
|
||||
|
||||
func (server *Server) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
|
||||
return server.pkgAuthzService.BatchCheck(ctx, tuples)
|
||||
func (server *Server) BatchCheck(ctx context.Context, tupleReq map[string]*openfgav1.TupleKey) (map[string]*authtypes.TupleKeyAuthorization, error) {
|
||||
return server.pkgAuthzService.BatchCheck(ctx, tupleReq)
|
||||
}
|
||||
|
||||
func (server *Server) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
|
||||
|
||||
@@ -220,6 +220,7 @@ func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.T
|
||||
return map[string][]*authtypes.Transaction{
|
||||
roletypes.SigNozAnonymousRoleName: {
|
||||
{
|
||||
ID: valuer.GenerateUUID(),
|
||||
Relation: authtypes.RelationRead,
|
||||
Object: *authtypes.MustNewObject(
|
||||
authtypes.Resource{
|
||||
|
||||
178
ee/querier/handler.go
Normal file
178
ee/querier/handler.go
Normal file
@@ -0,0 +1,178 @@
|
||||
package querier
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"io"
|
||||
"net/http"
|
||||
"runtime/debug"
|
||||
|
||||
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type handler struct {
|
||||
set factory.ProviderSettings
|
||||
querier querier.Querier
|
||||
community querier.Handler
|
||||
}
|
||||
|
||||
func NewHandler(set factory.ProviderSettings, querier querier.Querier, community querier.Handler) querier.Handler {
|
||||
return &handler{
|
||||
set: set,
|
||||
querier: querier,
|
||||
community: community,
|
||||
}
|
||||
}
|
||||
|
||||
func (h *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
|
||||
bodyBytes, err := io.ReadAll(req.Body)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to read request body: %v", err))
|
||||
return
|
||||
}
|
||||
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
|
||||
|
||||
ctx := req.Context()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
var queryRangeRequest qbtypes.QueryRangeRequest
|
||||
if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to decode request body: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
stackTrace := string(debug.Stack())
|
||||
|
||||
queryJSON, _ := json.Marshal(queryRangeRequest)
|
||||
|
||||
h.set.Logger.ErrorContext(ctx, "panic in QueryRange",
|
||||
"error", r,
|
||||
"user", claims.UserID,
|
||||
"payload", string(queryJSON),
|
||||
"stacktrace", stackTrace,
|
||||
)
|
||||
|
||||
render.Error(rw, errors.NewInternalf(
|
||||
errors.CodeInternal,
|
||||
"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
|
||||
))
|
||||
}
|
||||
}()
|
||||
|
||||
if err := queryRangeRequest.Validate(); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
|
||||
anomalies, err := h.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
results := []any{}
|
||||
for _, item := range anomalies.Results {
|
||||
results = append(results, item)
|
||||
}
|
||||
|
||||
// Build step intervals from the anomaly query
|
||||
stepIntervals := make(map[string]uint64)
|
||||
if anomalyQuery.StepInterval.Duration > 0 {
|
||||
stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
|
||||
}
|
||||
|
||||
finalResp := &qbtypes.QueryRangeResponse{
|
||||
Type: queryRangeRequest.RequestType,
|
||||
Data: qbtypes.QueryData{
|
||||
Results: results,
|
||||
},
|
||||
Meta: qbtypes.ExecStats{
|
||||
StepIntervals: stepIntervals,
|
||||
},
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, finalResp)
|
||||
return
|
||||
}
|
||||
|
||||
// regular query range request, delegate to community handler
|
||||
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
|
||||
h.community.QueryRange(rw, req)
|
||||
}
|
||||
|
||||
func (h *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
|
||||
h.community.QueryRawStream(rw, req)
|
||||
}
|
||||
|
||||
func (h *handler) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
|
||||
h.community.ReplaceVariables(rw, req)
|
||||
}
|
||||
|
||||
func extractSeasonality(anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) anomalyV2.Seasonality {
|
||||
for _, fn := range anomalyQuery.Functions {
|
||||
if fn.Name == qbtypes.FunctionNameAnomaly {
|
||||
for _, arg := range fn.Args {
|
||||
if arg.Name == "seasonality" {
|
||||
if seasonalityStr, ok := arg.Value.(string); ok {
|
||||
switch seasonalityStr {
|
||||
case "weekly":
|
||||
return anomalyV2.SeasonalityWeekly
|
||||
case "hourly":
|
||||
return anomalyV2.SeasonalityHourly
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return anomalyV2.SeasonalityDaily // default
|
||||
}
|
||||
|
||||
func (h *handler) createAnomalyProvider(seasonality anomalyV2.Seasonality) anomalyV2.Provider {
|
||||
switch seasonality {
|
||||
case anomalyV2.SeasonalityWeekly:
|
||||
return anomalyV2.NewWeeklyProvider(
|
||||
anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](h.querier),
|
||||
anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](h.set.Logger),
|
||||
)
|
||||
case anomalyV2.SeasonalityHourly:
|
||||
return anomalyV2.NewHourlyProvider(
|
||||
anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](h.querier),
|
||||
anomalyV2.WithLogger[*anomalyV2.HourlyProvider](h.set.Logger),
|
||||
)
|
||||
default:
|
||||
return anomalyV2.NewDailyProvider(
|
||||
anomalyV2.WithQuerier[*anomalyV2.DailyProvider](h.querier),
|
||||
anomalyV2.WithLogger[*anomalyV2.DailyProvider](h.set.Logger),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
func (h *handler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
|
||||
seasonality := extractSeasonality(anomalyQuery)
|
||||
provider := h.createAnomalyProvider(seasonality)
|
||||
|
||||
return provider.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{Params: queryRangeRequest})
|
||||
}
|
||||
@@ -2,17 +2,13 @@ package api
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/httputil"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
|
||||
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
|
||||
"github.com/SigNoz/signoz/ee/query-service/usage"
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/global"
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
querierAPI "github.com/SigNoz/signoz/pkg/querier"
|
||||
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
|
||||
@@ -33,7 +29,6 @@ type APIHandlerOptions struct {
|
||||
IntegrationsController *integrations.Controller
|
||||
CloudIntegrationsController *cloudintegrations.Controller
|
||||
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
|
||||
Gateway *httputil.ReverseProxy
|
||||
GatewayUrl string
|
||||
// Querier Influx Interval
|
||||
FluxInterval time.Duration
|
||||
@@ -57,7 +52,6 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz, config signoz.
|
||||
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
|
||||
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
|
||||
Signoz: signoz,
|
||||
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
|
||||
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
|
||||
}, config)
|
||||
|
||||
@@ -80,10 +74,6 @@ func (ah *APIHandler) UM() *usage.Manager {
|
||||
return ah.opts.UsageManager
|
||||
}
|
||||
|
||||
func (ah *APIHandler) Gateway() *httputil.ReverseProxy {
|
||||
return ah.opts.Gateway
|
||||
}
|
||||
|
||||
// RegisterRoutes registers routes for this handler on the given router
|
||||
func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
// note: add ee override methods first
|
||||
@@ -106,17 +96,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
// v4
|
||||
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
|
||||
|
||||
// v5
|
||||
router.Handle("/api/v5/query_range", handler.New(
|
||||
am.ViewAccess(ah.queryRangeV5),
|
||||
querierAPI.QueryRangeV5OpenAPIDef,
|
||||
)).Methods(http.MethodPost)
|
||||
|
||||
router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
|
||||
|
||||
// Gateway
|
||||
router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.EditAccess(ah.ServeGatewayHTTP))
|
||||
|
||||
ah.APIHandler.RegisterRoutes(router, am)
|
||||
|
||||
}
|
||||
|
||||
@@ -1,58 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request) {
|
||||
ctx := req.Context()
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
|
||||
return
|
||||
}
|
||||
|
||||
validPath := false
|
||||
for _, allowedPrefix := range gateway.AllowedPrefix {
|
||||
if strings.HasPrefix(req.URL.Path, gateway.RoutePrefix+allowedPrefix) {
|
||||
validPath = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !validPath {
|
||||
rw.WriteHeader(http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
//Create headers
|
||||
var licenseKey string
|
||||
if license != nil {
|
||||
licenseKey = license.Key
|
||||
}
|
||||
|
||||
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)
|
||||
req.Header.Set("X-Consumer-Username", "lid:00000000-0000-0000-0000-000000000000")
|
||||
req.Header.Set("X-Consumer-Groups", "ns:default")
|
||||
|
||||
ah.Gateway().ServeHTTP(rw, req)
|
||||
}
|
||||
@@ -2,16 +2,11 @@ package api
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"runtime/debug"
|
||||
|
||||
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
|
||||
"github.com/SigNoz/signoz/ee/query-service/anomaly"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
@@ -20,8 +15,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"go.uber.org/zap"
|
||||
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
)
|
||||
|
||||
func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
|
||||
@@ -144,140 +137,3 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
}
|
||||
|
||||
func extractSeasonality(anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) anomalyV2.Seasonality {
|
||||
for _, fn := range anomalyQuery.Functions {
|
||||
if fn.Name == qbtypes.FunctionNameAnomaly {
|
||||
for _, arg := range fn.Args {
|
||||
if arg.Name == "seasonality" {
|
||||
if seasonalityStr, ok := arg.Value.(string); ok {
|
||||
switch seasonalityStr {
|
||||
case "weekly":
|
||||
return anomalyV2.SeasonalityWeekly
|
||||
case "hourly":
|
||||
return anomalyV2.SeasonalityHourly
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return anomalyV2.SeasonalityDaily // default
|
||||
}
|
||||
|
||||
func createAnomalyProvider(aH *APIHandler, seasonality anomalyV2.Seasonality) anomalyV2.Provider {
|
||||
switch seasonality {
|
||||
case anomalyV2.SeasonalityWeekly:
|
||||
return anomalyV2.NewWeeklyProvider(
|
||||
anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](aH.Signoz.Querier),
|
||||
anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](aH.Signoz.Instrumentation.Logger()),
|
||||
)
|
||||
case anomalyV2.SeasonalityHourly:
|
||||
return anomalyV2.NewHourlyProvider(
|
||||
			anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](aH.Signoz.Querier),
			anomalyV2.WithLogger[*anomalyV2.HourlyProvider](aH.Signoz.Instrumentation.Logger()),
		)
	default:
		return anomalyV2.NewDailyProvider(
			anomalyV2.WithQuerier[*anomalyV2.DailyProvider](aH.Signoz.Querier),
			anomalyV2.WithLogger[*anomalyV2.DailyProvider](aH.Signoz.Instrumentation.Logger()),
		)
	}
}

func (aH *APIHandler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
	seasonality := extractSeasonality(anomalyQuery)
	provider := createAnomalyProvider(aH, seasonality)

	return provider.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{Params: queryRangeRequest})
}

func (aH *APIHandler) queryRangeV5(rw http.ResponseWriter, req *http.Request) {

	bodyBytes, err := io.ReadAll(req.Body)
	if err != nil {
		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to read request body: %v", err))
		return
	}
	req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))

	ctx := req.Context()

	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(rw, err)
		return
	}

	var queryRangeRequest qbtypes.QueryRangeRequest
	if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to decode request body: %v", err))
		return
	}

	defer func() {
		if r := recover(); r != nil {
			stackTrace := string(debug.Stack())

			queryJSON, _ := json.Marshal(queryRangeRequest)

			aH.Signoz.Instrumentation.Logger().ErrorContext(ctx, "panic in QueryRange",
				"error", r,
				"user", claims.UserID,
				"payload", string(queryJSON),
				"stacktrace", stackTrace,
			)

			render.Error(rw, errors.NewInternalf(
				errors.CodeInternal,
				"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
			))
		}
	}()

	if err := queryRangeRequest.Validate(); err != nil {
		render.Error(rw, err)
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(rw, err)
		return
	}

	if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
		anomalies, err := aH.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
		if err != nil {
			render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
			return
		}

		results := []any{}
		for _, item := range anomalies.Results {
			results = append(results, item)
		}

		// Build step intervals from the anomaly query
		stepIntervals := make(map[string]uint64)
		if anomalyQuery.StepInterval.Duration > 0 {
			stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
		}

		finalResp := &qbtypes.QueryRangeResponse{
			Type: queryRangeRequest.RequestType,
			Data: qbtypes.QueryData{
				Results: results,
			},
			Meta: qbtypes.ExecStats{
				StepIntervals: stepIntervals,
			},
		}

		render.Success(rw, http.StatusOK, finalResp)
		return
	} else {
		// regular query range request, let the querier handle it
		req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
		aH.QuerierAPI.QueryRange(rw, req)
	}
}

@@ -19,7 +19,6 @@ import (
	"github.com/gorilla/handlers"

	"github.com/SigNoz/signoz/ee/query-service/app/api"
	"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
	"github.com/SigNoz/signoz/ee/query-service/rules"
	"github.com/SigNoz/signoz/ee/query-service/usage"
	"github.com/SigNoz/signoz/pkg/alertmanager"

@@ -72,11 +71,6 @@ type Server struct {

// NewServer creates and initializes Server
func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
	gatewayProxy, err := gateway.NewProxy(config.Gateway.URL.String(), gateway.RoutePrefix)
	if err != nil {
		return nil, err
	}

	cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
		Provider: "memory",
		Memory: cache.Memory{

@@ -170,7 +164,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
		CloudIntegrationsController: cloudIntegrationsController,
		LogsParsingPipelineController: logParsingPipelineController,
		FluxInterval: config.Querier.FluxInterval,
		Gateway: gatewayProxy,
		GatewayUrl: config.Gateway.URL.String(),
		GlobalConfig: config.Global,
	}

@@ -240,7 +233,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
	apiHandler.RegisterQueryRangeV3Routes(r, am)
	apiHandler.RegisterInfraMetricsRoutes(r, am)
	apiHandler.RegisterQueryRangeV4Routes(r, am)
	apiHandler.RegisterQueryRangeV5Routes(r, am)
	apiHandler.RegisterWebSocketPaths(r, am)
	apiHandler.RegisterMessagingQueuesRoutes(r, am)
	apiHandler.RegisterThirdPartyApiRoutes(r, am)

@@ -1,9 +0,0 @@
package gateway

import (
	"net/http/httputil"
)

func NewNoopProxy() (*httputil.ReverseProxy, error) {
	return &httputil.ReverseProxy{}, nil
}

@@ -1,66 +0,0 @@
package gateway

import (
	"net/http"
	"net/http/httputil"
	"net/url"
	"path"
	"strings"
)

var (
	RoutePrefix string = "/api/gateway"
	AllowedPrefix []string = []string{"/v1/workspaces/me", "/v2/profiles/me", "/v2/deployments/me"}
)

type proxy struct {
	url *url.URL
	stripPath string
}

func NewProxy(u string, stripPath string) (*httputil.ReverseProxy, error) {
	url, err := url.Parse(u)
	if err != nil {
		return nil, err
	}

	proxy := &proxy{url: url, stripPath: stripPath}

	return &httputil.ReverseProxy{
		Rewrite: proxy.rewrite,
		ModifyResponse: proxy.modifyResponse,
		ErrorHandler: proxy.errorHandler,
	}, nil
}

func (p *proxy) rewrite(pr *httputil.ProxyRequest) {
	pr.SetURL(p.url)
	pr.SetXForwarded()
	pr.Out.URL.Path = cleanPath(strings.ReplaceAll(pr.Out.URL.Path, p.stripPath, ""))
}

func (p *proxy) modifyResponse(res *http.Response) error {
	return nil
}

func (p *proxy) errorHandler(rw http.ResponseWriter, req *http.Request, err error) {
	rw.WriteHeader(http.StatusBadGateway)
}

func cleanPath(p string) string {
	if p == "" {
		return "/"
	}
	if p[0] != '/' {
		p = "/" + p
	}
	np := path.Clean(p)
	if p[len(p)-1] == '/' && np != "/" {
		if len(p) == len(np)+1 && strings.HasPrefix(p, np) {
			np = p
		} else {
			np += "/"
		}
	}
	return np
}

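Editor's note (illustration, not part of the diff): the removed proxy rewrote incoming gateway requests by stripping the route prefix and normalising what was left. A simplified, self-contained Go sketch of that behaviour, assuming the same "/api/gateway" prefix; unlike the original cleanPath it does not preserve a trailing slash.

package main

import (
	"fmt"
	"path"
	"strings"
)

// rewritePath mirrors the deleted rewrite/cleanPath logic in simplified form:
// drop the gateway route prefix, then normalise the remaining path.
func rewritePath(incoming, stripPrefix string) string {
	p := strings.ReplaceAll(incoming, stripPrefix, "")
	if p == "" {
		return "/"
	}
	if p[0] != '/' {
		p = "/" + p
	}
	// Simplification: path.Clean drops any trailing slash the original kept.
	return path.Clean(p)
}

func main() {
	// "/api/gateway/v1/workspaces/me/keys" is forwarded upstream as "/v1/workspaces/me/keys".
	fmt.Println(rewritePath("/api/gateway/v1/workspaces/me/keys", "/api/gateway"))
}
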
@@ -1,61 +0,0 @@
package gateway

import (
	"context"
	"net/http"
	"net/http/httputil"
	"net/url"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestProxyRewrite(t *testing.T) {
	testCases := []struct {
		name string
		url *url.URL
		stripPath string
		in *url.URL
		expected *url.URL
	}{
		{
			name: "SamePathAdded",
			url: &url.URL{Scheme: "http", Host: "backend", Path: "/path1"},
			stripPath: "/strip",
			in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
			expected: &url.URL{Scheme: "http", Host: "backend", Path: "/path1/path1"},
		},
		{
			name: "NoStripPathInput",
			url: &url.URL{Scheme: "http", Host: "backend"},
			stripPath: "",
			in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
			expected: &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"},
		},
		{
			name: "NoStripPathPresentInReq",
			url: &url.URL{Scheme: "http", Host: "backend"},
			stripPath: "/not-found",
			in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
			expected: &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"},
		},
	}

	for _, tc := range testCases {
		proxy, err := NewProxy(tc.url.String(), tc.stripPath)
		require.NoError(t, err)
		inReq, err := http.NewRequest(http.MethodGet, tc.in.String(), nil)
		require.NoError(t, err)
		proxyReq := &httputil.ProxyRequest{
			In: inReq,
			Out: inReq.Clone(context.Background()),
		}
		proxy.Rewrite(proxyReq)

		assert.Equal(t, tc.expected.Host, proxyReq.Out.URL.Host)
		assert.Equal(t, tc.expected.Scheme, proxyReq.Out.URL.Scheme)
		assert.Equal(t, tc.expected.Path, proxyReq.Out.URL.Path)
		assert.Equal(t, tc.expected.Query(), proxyReq.Out.URL.Query())
	}
}

@@ -3,6 +3,7 @@ package httpzeus

import (
	"bytes"
	"context"
	"encoding/json"
	"io"
	"net/http"
	"net/url"

@@ -10,6 +11,7 @@ import (
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/http/client"
	"github.com/SigNoz/signoz/pkg/types/zeustypes"
	"github.com/SigNoz/signoz/pkg/zeus"
	"github.com/tidwall/gjson"
)

@@ -119,8 +121,13 @@ func (provider *Provider) PutMeters(ctx context.Context, key string, data []byte
	return err
}

func (provider *Provider) PutProfile(ctx context.Context, key string, body []byte) error {
	_, err := provider.do(
func (provider *Provider) PutProfile(ctx context.Context, key string, profile *zeustypes.PostableProfile) error {
	body, err := json.Marshal(profile)
	if err != nil {
		return err
	}

	_, err = provider.do(
		ctx,
		provider.config.URL.JoinPath("/v2/profiles/me"),
		http.MethodPut,

@@ -131,10 +138,15 @@ func (provider *Provider) PutProfile(ctx context.Context, key string, body []byt
	return err
}

func (provider *Provider) PutHost(ctx context.Context, key string, body []byte) error {
	_, err := provider.do(
func (provider *Provider) PutHost(ctx context.Context, key string, host *zeustypes.PostableHost) error {
	body, err := json.Marshal(host)
	if err != nil {
		return err
	}

	_, err = provider.do(
		ctx,
		provider.config.URL.JoinPath("/v2/deployments/me/hosts"),
		provider.config.URL.JoinPath("/v2/deployments/me/host"),
		http.MethodPut,
		key,
		body,

@@ -169,21 +181,28 @@ func (provider *Provider) do(ctx context.Context, url *url.URL, method string, k
		return body, nil
	}

	return nil, provider.errFromStatusCode(response.StatusCode)
	errorMessage := gjson.GetBytes(body, "error").String()
	if errorMessage == "" {
		errorMessage = "an unknown error occurred"
	}

	return nil, provider.errFromStatusCode(response.StatusCode, errorMessage)
}

// This can be taken down to the client package
func (provider *Provider) errFromStatusCode(statusCode int) error {
func (provider *Provider) errFromStatusCode(statusCode int, errorMessage string) error {
	switch statusCode {
	case http.StatusBadRequest:
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "bad request")
		return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, errorMessage)
	case http.StatusUnauthorized:
		return errors.Newf(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated")
		return errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, errorMessage)
	case http.StatusForbidden:
		return errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "forbidden")
		return errors.New(errors.TypeForbidden, errors.CodeForbidden, errorMessage)
	case http.StatusNotFound:
		return errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "not found")
		return errors.New(errors.TypeNotFound, errors.CodeNotFound, errorMessage)
	case http.StatusConflict:
		return errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, errorMessage)
	}

	return errors.Newf(errors.TypeInternal, errors.CodeInternal, "internal")
	return errors.New(errors.TypeInternal, errors.CodeInternal, errorMessage)
}

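Editor's note (illustration, not part of the diff): the updated do/errFromStatusCode path now surfaces the upstream "error" field from the response body instead of a fixed message. A minimal Go sketch of that message extraction with gjson, using a hypothetical response body:

package main

import (
	"fmt"
	"net/http"

	"github.com/tidwall/gjson"
)

// messageFromBody mimics the new behaviour shown above: read the "error"
// field from the response body and fall back to a generic message.
func messageFromBody(body []byte) string {
	msg := gjson.GetBytes(body, "error").String()
	if msg == "" {
		return "an unknown error occurred"
	}
	return msg
}

func main() {
	body := []byte(`{"error": "ingestion key not found"}`)
	// A 404 from the gateway would now produce a not-found error carrying this message.
	fmt.Println(http.StatusNotFound, messageFromBody(body))
}
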
@@ -1,29 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
  CreateIngestionKeyProps,
  IngestionKeyProps,
} from 'types/api/ingestionKeys/types';

const createIngestionKey = async (
  props: CreateIngestionKeyProps,
): Promise<SuccessResponse<IngestionKeyProps> | ErrorResponse> => {
  try {
    const response = await GatewayApiV1Instance.post('/workspaces/me/keys', {
      ...props,
    });

    return {
      statusCode: 200,
      error: null,
      message: response.data.status,
      payload: response.data.data,
    };
  } catch (error) {
    return ErrorResponseHandler(error as AxiosError);
  }
};

export default createIngestionKey;

@@ -1,26 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types';

const deleteIngestionKey = async (
  id: string,
): Promise<SuccessResponse<AllIngestionKeyProps> | ErrorResponse> => {
  try {
    const response = await GatewayApiV1Instance.delete(
      `/workspaces/me/keys/${id}`,
    );

    return {
      statusCode: 200,
      error: null,
      message: response.data.status,
      payload: response.data.data,
    };
  } catch (error) {
    return ErrorResponseHandler(error as AxiosError);
  }
};

export default deleteIngestionKey;

@@ -1,21 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { AxiosResponse } from 'axios';
import {
  AllIngestionKeyProps,
  GetIngestionKeyProps,
} from 'types/api/ingestionKeys/types';

export const getAllIngestionKeys = (
  props: GetIngestionKeyProps,
): Promise<AxiosResponse<AllIngestionKeyProps>> => {
  // eslint-disable-next-line @typescript-eslint/naming-convention
  const { search, per_page, page } = props;

  const BASE_URL = '/workspaces/me/keys';
  const URL_QUERY_PARAMS =
    search && search.length > 0
      ? `/search?name=${search}&page=1&per_page=100`
      : `?page=${page}&per_page=${per_page}`;

  return GatewayApiV1Instance.get(`${BASE_URL}${URL_QUERY_PARAMS}`);
};

@@ -1,65 +0,0 @@
/* eslint-disable @typescript-eslint/no-throw-literal */
import { GatewayApiV1Instance } from 'api';
import axios from 'axios';
import {
  AddLimitProps,
  LimitSuccessProps,
} from 'types/api/ingestionKeys/limits/types';

interface SuccessResponse<T> {
  statusCode: number;
  error: null;
  message: string;
  payload: T;
}

interface ErrorResponse {
  statusCode: number;
  error: string;
  message: string;
  payload: null;
}

const createLimitForIngestionKey = async (
  props: AddLimitProps,
): Promise<SuccessResponse<LimitSuccessProps> | ErrorResponse> => {
  try {
    const response = await GatewayApiV1Instance.post(
      `/workspaces/me/keys/${props.keyID}/limits`,
      {
        ...props,
      },
    );

    return {
      statusCode: 200,
      error: null,
      message: response.data.status,
      payload: response.data.data,
    };
  } catch (error) {
    if (axios.isAxiosError(error)) {
      // Axios error
      const errResponse: ErrorResponse = {
        statusCode: error.response?.status || 500,
        error: error.response?.data?.error,
        message: error.response?.data?.status || 'An error occurred',
        payload: null,
      };

      throw errResponse;
    } else {
      // Non-Axios error
      const errResponse: ErrorResponse = {
        statusCode: 500,
        error: 'Unknown error',
        message: 'An unknown error occurred',
        payload: null,
      };

      throw errResponse;
    }
  }
};

export default createLimitForIngestionKey;

@@ -1,26 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types';

const deleteLimitsForIngestionKey = async (
  id: string,
): Promise<SuccessResponse<AllIngestionKeyProps> | ErrorResponse> => {
  try {
    const response = await GatewayApiV1Instance.delete(
      `/workspaces/me/limits/${id}`,
    );

    return {
      statusCode: 200,
      error: null,
      message: response.data.status,
      payload: response.data.data,
    };
  } catch (error) {
    return ErrorResponseHandler(error as AxiosError);
  }
};

export default deleteLimitsForIngestionKey;

@@ -1,65 +0,0 @@
/* eslint-disable @typescript-eslint/no-throw-literal */
import { GatewayApiV1Instance } from 'api';
import axios from 'axios';
import {
  LimitSuccessProps,
  UpdateLimitProps,
} from 'types/api/ingestionKeys/limits/types';

interface SuccessResponse<T> {
  statusCode: number;
  error: null;
  message: string;
  payload: T;
}

interface ErrorResponse {
  statusCode: number;
  error: string;
  message: string;
  payload: null;
}

const updateLimitForIngestionKey = async (
  props: UpdateLimitProps,
): Promise<SuccessResponse<LimitSuccessProps> | ErrorResponse> => {
  try {
    const response = await GatewayApiV1Instance.patch(
      `/workspaces/me/limits/${props.limitID}`,
      {
        config: props.config,
      },
    );

    return {
      statusCode: 200,
      error: null,
      message: response.data.status,
      payload: response.data.data,
    };
  } catch (error) {
    if (axios.isAxiosError(error)) {
      // Axios error
      const errResponse: ErrorResponse = {
        statusCode: error.response?.status || 500,
        error: error.response?.data?.error,
        message: error.response?.data?.status || 'An error occurred',
        payload: null,
      };

      throw errResponse;
    } else {
      // Non-Axios error
      const errResponse: ErrorResponse = {
        statusCode: 500,
        error: 'Unknown error',
        message: 'An unknown error occurred',
        payload: null,
      };

      throw errResponse;
    }
  }
};

export default updateLimitForIngestionKey;

@@ -1,32 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
  IngestionKeysPayloadProps,
  UpdateIngestionKeyProps,
} from 'types/api/ingestionKeys/types';

const updateIngestionKey = async (
  props: UpdateIngestionKeyProps,
): Promise<SuccessResponse<IngestionKeysPayloadProps> | ErrorResponse> => {
  try {
    const response = await GatewayApiV1Instance.patch(
      `/workspaces/me/keys/${props.id}`,
      {
        ...props.data,
      },
    );

    return {
      statusCode: 200,
      error: null,
      message: response.data.status,
      payload: response.data.data,
    };
  } catch (error) {
    return ErrorResponseHandler(error as AxiosError);
  }
};

export default updateIngestionKey;

@@ -4,8 +4,6 @@ export const apiV2 = '/api/v2/';
export const apiV3 = '/api/v3/';
export const apiV4 = '/api/v4/';
export const apiV5 = '/api/v5/';
export const gatewayApiV1 = '/api/gateway/v1/';
export const gatewayApiV2 = '/api/gateway/v2/';
export const apiAlertManager = '/api/alertmanager/';

export default apiV1;

@@ -1,7 +0,0 @@
import { GatewayApiV2Instance as axios } from 'api';
import { AxiosResponse } from 'axios';
import { DeploymentsDataProps } from 'types/api/customDomain/types';

export const getDeploymentsData = (): Promise<
  AxiosResponse<DeploymentsDataProps>
> => axios.get(`/deployments/me`);

@@ -1,16 +0,0 @@
import { GatewayApiV2Instance as axios } from 'api';
import { AxiosError } from 'axios';
import { SuccessResponse } from 'types/api';
import {
  PayloadProps,
  UpdateCustomDomainProps,
} from 'types/api/customDomain/types';

const updateSubDomainAPI = async (
  props: UpdateCustomDomainProps,
): Promise<SuccessResponse<PayloadProps> | AxiosError> =>
  axios.put(`/deployments/me/host`, {
    ...props.data,
  });

export default updateSubDomainAPI;

frontend/src/api/generated/services/authz/index.ts (new file, 107 lines)
@@ -0,0 +1,107 @@
/**
 * ! Do not edit manually
 * * The file has been auto-generated using Orval for SigNoz
 * * regenerate with 'yarn generate:api'
 * SigNoz
 */
import type {
  MutationFunction,
  UseMutationOptions,
  UseMutationResult,
} from 'react-query';
import { useMutation } from 'react-query';

import { GeneratedAPIInstance } from '../../../index';
import type {
  AuthtypesTransactionDTO,
  AuthzCheck200,
  RenderErrorResponseDTO,
} from '../sigNoz.schemas';

type AwaitedInput<T> = PromiseLike<T> | T;

type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;

/**
 * Checks if the authenticated user has permissions for given transactions
 * @summary Check permissions
 */
export const authzCheck = (
  authtypesTransactionDTO: AuthtypesTransactionDTO[],
  signal?: AbortSignal,
) => {
  return GeneratedAPIInstance<AuthzCheck200>({
    url: `/api/v1/authz/check`,
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    data: authtypesTransactionDTO,
    signal,
  });
};

export const getAuthzCheckMutationOptions = <
  TError = RenderErrorResponseDTO,
  TContext = unknown
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof authzCheck>>,
    TError,
    { data: AuthtypesTransactionDTO[] },
    TContext
  >;
}): UseMutationOptions<
  Awaited<ReturnType<typeof authzCheck>>,
  TError,
  { data: AuthtypesTransactionDTO[] },
  TContext
> => {
  const mutationKey = ['authzCheck'];
  const { mutation: mutationOptions } = options
    ? options.mutation &&
      'mutationKey' in options.mutation &&
      options.mutation.mutationKey
      ? options
      : { ...options, mutation: { ...options.mutation, mutationKey } }
    : { mutation: { mutationKey } };

  const mutationFn: MutationFunction<
    Awaited<ReturnType<typeof authzCheck>>,
    { data: AuthtypesTransactionDTO[] }
  > = (props) => {
    const { data } = props ?? {};

    return authzCheck(data);
  };

  return { mutationFn, ...mutationOptions };
};

export type AuthzCheckMutationResult = NonNullable<
  Awaited<ReturnType<typeof authzCheck>>
>;
export type AuthzCheckMutationBody = AuthtypesTransactionDTO[];
export type AuthzCheckMutationError = RenderErrorResponseDTO;

/**
 * @summary Check permissions
 */
export const useAuthzCheck = <
  TError = RenderErrorResponseDTO,
  TContext = unknown
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof authzCheck>>,
    TError,
    { data: AuthtypesTransactionDTO[] },
    TContext
  >;
}): UseMutationResult<
  Awaited<ReturnType<typeof authzCheck>>,
  TError,
  { data: AuthtypesTransactionDTO[] },
  TContext
> => {
  const mutationOptions = getAuthzCheckMutationOptions(options);

  return useMutation(mutationOptions);
};

@@ -678,7 +678,7 @@ export const useUpdateIngestionKeyLimit = <
|
||||
* @summary Search ingestion keys for workspace
|
||||
*/
|
||||
export const searchIngestionKeys = (
|
||||
params?: SearchIngestionKeysParams,
|
||||
params: SearchIngestionKeysParams,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<SearchIngestionKeys200>({
|
||||
@@ -699,7 +699,7 @@ export const getSearchIngestionKeysQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
params?: SearchIngestionKeysParams,
|
||||
params: SearchIngestionKeysParams,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof searchIngestionKeys>>,
|
||||
@@ -737,7 +737,7 @@ export function useSearchIngestionKeys<
|
||||
TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
params?: SearchIngestionKeysParams,
|
||||
params: SearchIngestionKeysParams,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof searchIngestionKeys>>,
|
||||
@@ -762,7 +762,7 @@ export function useSearchIngestionKeys<
|
||||
*/
|
||||
export const invalidateSearchIngestionKeys = async (
|
||||
queryClient: QueryClient,
|
||||
params?: SearchIngestionKeysParams,
|
||||
params: SearchIngestionKeysParams,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
|
||||
@@ -16,6 +16,7 @@ import type {
|
||||
Querybuildertypesv5QueryRangeRequestDTO,
|
||||
QueryRangeV5200,
|
||||
RenderErrorResponseDTO,
|
||||
ReplaceVariables200,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
@@ -105,3 +106,86 @@ export const useQueryRangeV5 = <
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* Replace variables in a query
|
||||
* @summary Replace variables
|
||||
*/
|
||||
export const replaceVariables = (
|
||||
querybuildertypesv5QueryRangeRequestDTO: Querybuildertypesv5QueryRangeRequestDTO,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<ReplaceVariables200>({
|
||||
url: `/api/v5/substitute_vars`,
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: querybuildertypesv5QueryRangeRequestDTO,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getReplaceVariablesMutationOptions = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof replaceVariables>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof replaceVariables>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['replaceVariables'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof replaceVariables>>,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO }
|
||||
> = (props) => {
|
||||
const { data } = props ?? {};
|
||||
|
||||
return replaceVariables(data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type ReplaceVariablesMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof replaceVariables>>
|
||||
>;
|
||||
export type ReplaceVariablesMutationBody = Querybuildertypesv5QueryRangeRequestDTO;
|
||||
export type ReplaceVariablesMutationError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Replace variables
|
||||
*/
|
||||
export const useReplaceVariables = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof replaceVariables>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof replaceVariables>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getReplaceVariablesMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
@@ -21,11 +21,18 @@ import { GeneratedAPIInstance } from '../../../index';
|
||||
import type {
|
||||
CreateRole201,
|
||||
DeleteRolePathParameters,
|
||||
GetObjects200,
|
||||
GetObjectsPathParameters,
|
||||
GetResources200,
|
||||
GetRole200,
|
||||
GetRolePathParameters,
|
||||
ListRoles200,
|
||||
PatchObjectsPathParameters,
|
||||
PatchRolePathParameters,
|
||||
RenderErrorResponseDTO,
|
||||
RoletypesPatchableObjectsDTO,
|
||||
RoletypesPatchableRoleDTO,
|
||||
RoletypesPostableRoleDTO,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
@@ -114,10 +121,15 @@ export const invalidateListRoles = async (
|
||||
* This endpoint creates a role
|
||||
* @summary Create role
|
||||
*/
|
||||
export const createRole = (signal?: AbortSignal) => {
|
||||
export const createRole = (
|
||||
roletypesPostableRoleDTO: RoletypesPostableRoleDTO,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<CreateRole201>({
|
||||
url: `/api/v1/roles`,
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: roletypesPostableRoleDTO,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
@@ -129,13 +141,13 @@ export const getCreateRoleMutationOptions = <
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof createRole>>,
|
||||
TError,
|
||||
void,
|
||||
{ data: RoletypesPostableRoleDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof createRole>>,
|
||||
TError,
|
||||
void,
|
||||
{ data: RoletypesPostableRoleDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['createRole'];
|
||||
@@ -149,9 +161,11 @@ export const getCreateRoleMutationOptions = <
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof createRole>>,
|
||||
void
|
||||
> = () => {
|
||||
return createRole();
|
||||
{ data: RoletypesPostableRoleDTO }
|
||||
> = (props) => {
|
||||
const { data } = props ?? {};
|
||||
|
||||
return createRole(data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
@@ -160,7 +174,7 @@ export const getCreateRoleMutationOptions = <
|
||||
export type CreateRoleMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof createRole>>
|
||||
>;
|
||||
|
||||
export type CreateRoleMutationBody = RoletypesPostableRoleDTO;
|
||||
export type CreateRoleMutationError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
@@ -173,13 +187,13 @@ export const useCreateRole = <
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof createRole>>,
|
||||
TError,
|
||||
void,
|
||||
{ data: RoletypesPostableRoleDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof createRole>>,
|
||||
TError,
|
||||
void,
|
||||
{ data: RoletypesPostableRoleDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getCreateRoleMutationOptions(options);
|
||||
@@ -358,10 +372,15 @@ export const invalidateGetRole = async (
|
||||
* This endpoint patches a role
|
||||
* @summary Patch role
|
||||
*/
|
||||
export const patchRole = ({ id }: PatchRolePathParameters) => {
|
||||
export const patchRole = (
|
||||
{ id }: PatchRolePathParameters,
|
||||
roletypesPatchableRoleDTO: RoletypesPatchableRoleDTO,
|
||||
) => {
|
||||
return GeneratedAPIInstance<string>({
|
||||
url: `/api/v1/roles/${id}`,
|
||||
method: 'PATCH',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: roletypesPatchableRoleDTO,
|
||||
});
|
||||
};
|
||||
|
||||
@@ -372,13 +391,13 @@ export const getPatchRoleMutationOptions = <
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof patchRole>>,
|
||||
TError,
|
||||
{ pathParams: PatchRolePathParameters },
|
||||
{ pathParams: PatchRolePathParameters; data: RoletypesPatchableRoleDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof patchRole>>,
|
||||
TError,
|
||||
{ pathParams: PatchRolePathParameters },
|
||||
{ pathParams: PatchRolePathParameters; data: RoletypesPatchableRoleDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['patchRole'];
|
||||
@@ -392,11 +411,11 @@ export const getPatchRoleMutationOptions = <
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof patchRole>>,
|
||||
{ pathParams: PatchRolePathParameters }
|
||||
{ pathParams: PatchRolePathParameters; data: RoletypesPatchableRoleDTO }
|
||||
> = (props) => {
|
||||
const { pathParams } = props ?? {};
|
||||
const { pathParams, data } = props ?? {};
|
||||
|
||||
return patchRole(pathParams);
|
||||
return patchRole(pathParams, data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
@@ -405,7 +424,7 @@ export const getPatchRoleMutationOptions = <
|
||||
export type PatchRoleMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof patchRole>>
|
||||
>;
|
||||
|
||||
export type PatchRoleMutationBody = RoletypesPatchableRoleDTO;
|
||||
export type PatchRoleMutationError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
@@ -418,16 +437,292 @@ export const usePatchRole = <
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof patchRole>>,
|
||||
TError,
|
||||
{ pathParams: PatchRolePathParameters },
|
||||
{ pathParams: PatchRolePathParameters; data: RoletypesPatchableRoleDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof patchRole>>,
|
||||
TError,
|
||||
{ pathParams: PatchRolePathParameters },
|
||||
{ pathParams: PatchRolePathParameters; data: RoletypesPatchableRoleDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getPatchRoleMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* Gets all objects connected to the specified role via a given relation type
|
||||
* @summary Get objects for a role by relation
|
||||
*/
|
||||
export const getObjects = (
|
||||
{ id, relation }: GetObjectsPathParameters,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<GetObjects200>({
|
||||
url: `/api/v1/roles/${id}/relation/${relation}/objects`,
|
||||
method: 'GET',
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getGetObjectsQueryKey = ({
|
||||
id,
|
||||
relation,
|
||||
}: GetObjectsPathParameters) => {
|
||||
return ['getObjects'] as const;
|
||||
};
|
||||
|
||||
export const getGetObjectsQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof getObjects>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
{ id, relation }: GetObjectsPathParameters,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getObjects>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey =
|
||||
queryOptions?.queryKey ?? getGetObjectsQueryKey({ id, relation });
|
||||
|
||||
const queryFn: QueryFunction<Awaited<ReturnType<typeof getObjects>>> = ({
|
||||
signal,
|
||||
}) => getObjects({ id, relation }, signal);
|
||||
|
||||
return {
|
||||
queryKey,
|
||||
queryFn,
|
||||
enabled: !!(id && relation),
|
||||
...queryOptions,
|
||||
} as UseQueryOptions<Awaited<ReturnType<typeof getObjects>>, TError, TData> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
};
|
||||
|
||||
export type GetObjectsQueryResult = NonNullable<
|
||||
Awaited<ReturnType<typeof getObjects>>
|
||||
>;
|
||||
export type GetObjectsQueryError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Get objects for a role by relation
|
||||
*/
|
||||
|
||||
export function useGetObjects<
|
||||
TData = Awaited<ReturnType<typeof getObjects>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
{ id, relation }: GetObjectsPathParameters,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getObjects>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getGetObjectsQueryOptions({ id, relation }, options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Get objects for a role by relation
|
||||
*/
|
||||
export const invalidateGetObjects = async (
|
||||
queryClient: QueryClient,
|
||||
{ id, relation }: GetObjectsPathParameters,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getGetObjectsQueryKey({ id, relation }) },
|
||||
options,
|
||||
);
|
||||
|
||||
return queryClient;
|
||||
};
|
||||
|
||||
/**
|
||||
* Patches the objects connected to the specified role via a given relation type
|
||||
* @summary Patch objects for a role by relation
|
||||
*/
|
||||
export const patchObjects = (
|
||||
{ id, relation }: PatchObjectsPathParameters,
|
||||
roletypesPatchableObjectsDTO: RoletypesPatchableObjectsDTO,
|
||||
) => {
|
||||
return GeneratedAPIInstance<string>({
|
||||
url: `/api/v1/roles/${id}/relation/${relation}/objects`,
|
||||
method: 'PATCH',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: roletypesPatchableObjectsDTO,
|
||||
});
|
||||
};
|
||||
|
||||
export const getPatchObjectsMutationOptions = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof patchObjects>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: PatchObjectsPathParameters;
|
||||
data: RoletypesPatchableObjectsDTO;
|
||||
},
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof patchObjects>>,
|
||||
TError,
|
||||
{ pathParams: PatchObjectsPathParameters; data: RoletypesPatchableObjectsDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['patchObjects'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof patchObjects>>,
|
||||
{ pathParams: PatchObjectsPathParameters; data: RoletypesPatchableObjectsDTO }
|
||||
> = (props) => {
|
||||
const { pathParams, data } = props ?? {};
|
||||
|
||||
return patchObjects(pathParams, data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type PatchObjectsMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof patchObjects>>
|
||||
>;
|
||||
export type PatchObjectsMutationBody = RoletypesPatchableObjectsDTO;
|
||||
export type PatchObjectsMutationError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Patch objects for a role by relation
|
||||
*/
|
||||
export const usePatchObjects = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof patchObjects>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: PatchObjectsPathParameters;
|
||||
data: RoletypesPatchableObjectsDTO;
|
||||
},
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof patchObjects>>,
|
||||
TError,
|
||||
{ pathParams: PatchObjectsPathParameters; data: RoletypesPatchableObjectsDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getPatchObjectsMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* Gets all the available resources for role assignment
|
||||
* @summary Get resources
|
||||
*/
|
||||
export const getResources = (signal?: AbortSignal) => {
|
||||
return GeneratedAPIInstance<GetResources200>({
|
||||
url: `/api/v1/roles/resources`,
|
||||
method: 'GET',
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getGetResourcesQueryKey = () => {
|
||||
return ['getResources'] as const;
|
||||
};
|
||||
|
||||
export const getGetResourcesQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof getResources>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getResources>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
}) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey = queryOptions?.queryKey ?? getGetResourcesQueryKey();
|
||||
|
||||
const queryFn: QueryFunction<Awaited<ReturnType<typeof getResources>>> = ({
|
||||
signal,
|
||||
}) => getResources(signal);
|
||||
|
||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getResources>>,
|
||||
TError,
|
||||
TData
|
||||
> & { queryKey: QueryKey };
|
||||
};
|
||||
|
||||
export type GetResourcesQueryResult = NonNullable<
|
||||
Awaited<ReturnType<typeof getResources>>
|
||||
>;
|
||||
export type GetResourcesQueryError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Get resources
|
||||
*/
|
||||
|
||||
export function useGetResources<
|
||||
TData = Awaited<ReturnType<typeof getResources>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getResources>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getGetResourcesQueryOptions(options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Get resources
|
||||
*/
|
||||
export const invalidateGetResources = async (
|
||||
queryClient: QueryClient,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getGetResourcesQueryKey() },
|
||||
options,
|
||||
);
|
||||
|
||||
return queryClient;
|
||||
};
|
||||
|
||||
@@ -127,6 +127,18 @@ export interface AuthtypesGettableTokenDTO {
|
||||
tokenType?: string;
|
||||
}
|
||||
|
||||
export interface AuthtypesGettableTransactionDTO {
|
||||
/**
|
||||
* @type boolean
|
||||
*/
|
||||
authorized?: boolean;
|
||||
object?: AuthtypesObjectDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
relation?: string;
|
||||
}
|
||||
|
||||
export type AuthtypesGoogleConfigDTODomainToAdminEmail = {
|
||||
[key: string]: string;
|
||||
};
|
||||
@@ -170,6 +182,10 @@ export interface AuthtypesGoogleConfigDTO {
|
||||
serviceAccountJson?: string;
|
||||
}
|
||||
|
||||
export interface AuthtypesNameDTO {
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
export interface AuthtypesOIDCConfigDTO {
|
||||
claimMapping?: AuthtypesAttributeMappingDTO;
|
||||
/**
|
||||
@@ -198,6 +214,11 @@ export interface AuthtypesOIDCConfigDTO {
|
||||
issuerAlias?: string;
|
||||
}
|
||||
|
||||
export interface AuthtypesObjectDTO {
|
||||
resource: AuthtypesResourceDTO;
|
||||
selector: AuthtypesSelectorDTO;
|
||||
}
|
||||
|
||||
export interface AuthtypesOrgSessionContextDTO {
|
||||
authNSupport?: AuthtypesAuthNSupportDTO;
|
||||
/**
|
||||
@@ -248,6 +269,14 @@ export interface AuthtypesPostableRotateTokenDTO {
|
||||
refreshToken?: string;
|
||||
}
|
||||
|
||||
export interface AuthtypesResourceDTO {
|
||||
name: AuthtypesNameDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
type: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* @nullable
|
||||
*/
|
||||
@@ -291,6 +320,10 @@ export interface AuthtypesSamlConfigDTO {
|
||||
samlIdp?: string;
|
||||
}
|
||||
|
||||
export interface AuthtypesSelectorDTO {
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
export interface AuthtypesSessionContextDTO {
|
||||
/**
|
||||
* @type boolean
|
||||
@@ -303,6 +336,18 @@ export interface AuthtypesSessionContextDTO {
|
||||
orgs?: AuthtypesOrgSessionContextDTO[] | null;
|
||||
}
|
||||
|
||||
export interface AuthtypesTransactionDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
id?: string;
|
||||
object: AuthtypesObjectDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
relation: string;
|
||||
}
|
||||
|
||||
export interface AuthtypesUpdateableAuthDomainDTO {
|
||||
config?: AuthtypesAuthDomainConfigDTO;
|
||||
}
|
||||
@@ -453,18 +498,18 @@ export interface GatewaytypesGettableCreatedIngestionKeyDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
id?: string;
|
||||
id: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
value?: string;
|
||||
value: string;
|
||||
}
|
||||
|
||||
export interface GatewaytypesGettableCreatedIngestionKeyLimitDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
id?: string;
|
||||
id: string;
|
||||
}
|
||||
|
||||
export interface GatewaytypesGettableIngestionKeysDTO {
|
||||
@@ -616,7 +661,7 @@ export interface GatewaytypesPostableIngestionKeyDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name?: string;
|
||||
name: string;
|
||||
/**
|
||||
* @type array
|
||||
* @nullable true
|
||||
@@ -638,7 +683,7 @@ export interface GatewaytypesPostableIngestionKeyLimitDTO {
|
||||
}
|
||||
|
||||
export interface GatewaytypesUpdatableIngestionKeyLimitDTO {
|
||||
config?: GatewaytypesLimitConfigDTO;
|
||||
config: GatewaytypesLimitConfigDTO;
|
||||
/**
|
||||
* @type array
|
||||
* @nullable true
|
||||
@@ -1947,6 +1992,58 @@ export interface RenderErrorResponseDTO {
|
||||
status?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* @nullable
|
||||
*/
|
||||
export type RoletypesGettableResourcesDTORelations = {
|
||||
[key: string]: string[];
|
||||
} | null;
|
||||
|
||||
export interface RoletypesGettableResourcesDTO {
|
||||
/**
|
||||
* @type object
|
||||
* @nullable true
|
||||
*/
|
||||
relations?: RoletypesGettableResourcesDTORelations;
|
||||
/**
|
||||
* @type array
|
||||
* @nullable true
|
||||
*/
|
||||
resources?: AuthtypesResourceDTO[] | null;
|
||||
}
|
||||
|
||||
export interface RoletypesPatchableObjectsDTO {
|
||||
/**
|
||||
* @type array
|
||||
* @nullable true
|
||||
*/
|
||||
additions: AuthtypesObjectDTO[] | null;
|
||||
/**
|
||||
* @type array
|
||||
* @nullable true
|
||||
*/
|
||||
deletions: AuthtypesObjectDTO[] | null;
|
||||
}
|
||||
|
||||
export interface RoletypesPatchableRoleDTO {
|
||||
/**
|
||||
* @type string
|
||||
* @nullable true
|
||||
*/
|
||||
description?: string | null;
|
||||
}
|
||||
|
||||
export interface RoletypesPostableRoleDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
description?: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface RoletypesRoleDTO {
|
||||
/**
|
||||
* @type string
|
||||
@@ -2414,6 +2511,102 @@ export interface TypesUserDTO {
|
||||
updatedAt?: Date;
|
||||
}
|
||||
|
||||
export interface ZeustypesGettableHostDTO {
|
||||
/**
|
||||
* @type array
|
||||
* @nullable true
|
||||
*/
|
||||
hosts: ZeustypesHostDTO[] | null;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
state: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
tier: string;
|
||||
}
|
||||
|
||||
export interface ZeustypesHostDTO {
|
||||
/**
|
||||
* @type boolean
|
||||
*/
|
||||
is_default: boolean;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
url: string;
|
||||
}
|
||||
|
||||
export interface ZeustypesPostableHostDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface ZeustypesPostableProfileDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
existing_observability_tool: string;
|
||||
/**
|
||||
* @type boolean
|
||||
*/
|
||||
has_existing_observability_tool: boolean;
|
||||
/**
|
||||
* @type integer
|
||||
* @format int64
|
||||
*/
|
||||
logs_scale_per_day_in_gb: number;
|
||||
/**
|
||||
* @type integer
|
||||
* @format int64
|
||||
*/
|
||||
number_of_hosts: number;
|
||||
/**
|
||||
* @type integer
|
||||
* @format int64
|
||||
*/
|
||||
number_of_services: number;
|
||||
/**
|
||||
* @type array
|
||||
* @nullable true
|
||||
*/
|
||||
reasons_for_interest_in_signoz: string[] | null;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
timeline_for_migrating_to_signoz: string;
|
||||
/**
|
||||
* @type boolean
|
||||
*/
|
||||
uses_otel: boolean;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
where_did_you_discover_signoz: string;
|
||||
}
|
||||
|
||||
export type AuthzCheck200 = {
|
||||
/**
|
||||
* @type array
|
||||
*/
|
||||
data?: AuthtypesGettableTransactionDTO[];
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status?: string;
|
||||
};
|
||||
|
||||
export type ChangePasswordPathParameters = {
|
||||
id: string;
|
||||
};
|
||||
@@ -2817,6 +3010,33 @@ export type GetRole200 = {
|
||||
export type PatchRolePathParameters = {
|
||||
id: string;
|
||||
};
|
||||
export type GetObjectsPathParameters = {
|
||||
id: string;
|
||||
relation: string;
|
||||
};
|
||||
export type GetObjects200 = {
|
||||
/**
|
||||
* @type array
|
||||
*/
|
||||
data?: AuthtypesObjectDTO[];
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status?: string;
|
||||
};
|
||||
|
||||
export type PatchObjectsPathParameters = {
|
||||
id: string;
|
||||
relation: string;
|
||||
};
|
||||
export type GetResources200 = {
|
||||
data?: RoletypesGettableResourcesDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status?: string;
|
||||
};
|
||||
|
||||
export type ListUsers200 = {
|
||||
/**
|
||||
* @type array
|
||||
@@ -2954,7 +3174,7 @@ export type SearchIngestionKeysParams = {
|
||||
* @type string
|
||||
* @description undefined
|
||||
*/
|
||||
name?: string;
|
||||
name: string;
|
||||
/**
|
||||
* @type integer
|
||||
* @description undefined
|
||||
@@ -3129,6 +3349,14 @@ export type RotateSession200 = {
|
||||
status?: string;
|
||||
};
|
||||
|
||||
export type GetHosts200 = {
|
||||
data?: ZeustypesGettableHostDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status?: string;
|
||||
};
|
||||
|
||||
export type QueryRangeV5200 = {
|
||||
data?: Querybuildertypesv5QueryRangeResponseDTO;
|
||||
/**
|
||||
@@ -3136,3 +3364,11 @@ export type QueryRangeV5200 = {
|
||||
*/
|
||||
status?: string;
|
||||
};
|
||||
|
||||
export type ReplaceVariables200 = {
|
||||
data?: Querybuildertypesv5QueryRangeRequestDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status?: string;
|
||||
};
|
||||
|
||||
frontend/src/api/generated/services/zeus/index.ts (new file, 269 lines)
@@ -0,0 +1,269 @@
|
||||
/**
|
||||
* ! Do not edit manually
|
||||
* * The file has been auto-generated using Orval for SigNoz
|
||||
* * regenerate with 'yarn generate:api'
|
||||
* SigNoz
|
||||
*/
|
||||
import type {
|
||||
InvalidateOptions,
|
||||
MutationFunction,
|
||||
QueryClient,
|
||||
QueryFunction,
|
||||
QueryKey,
|
||||
UseMutationOptions,
|
||||
UseMutationResult,
|
||||
UseQueryOptions,
|
||||
UseQueryResult,
|
||||
} from 'react-query';
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
import type {
|
||||
GetHosts200,
|
||||
RenderErrorResponseDTO,
|
||||
ZeustypesPostableHostDTO,
|
||||
ZeustypesPostableProfileDTO,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
|
||||
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
|
||||
|
||||
/**
|
||||
* This endpoint gets the host info from zeus.
|
||||
* @summary Get host info from Zeus.
|
||||
*/
|
||||
export const getHosts = (signal?: AbortSignal) => {
|
||||
return GeneratedAPIInstance<GetHosts200>({
|
||||
url: `/api/v2/zeus/hosts`,
|
||||
method: 'GET',
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getGetHostsQueryKey = () => {
|
||||
return ['getHosts'] as const;
|
||||
};
|
||||
|
||||
export const getGetHostsQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof getHosts>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<Awaited<ReturnType<typeof getHosts>>, TError, TData>;
|
||||
}) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey = queryOptions?.queryKey ?? getGetHostsQueryKey();
|
||||
|
||||
const queryFn: QueryFunction<Awaited<ReturnType<typeof getHosts>>> = ({
|
||||
signal,
|
||||
}) => getHosts(signal);
|
||||
|
||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getHosts>>,
|
||||
TError,
|
||||
TData
|
||||
> & { queryKey: QueryKey };
|
||||
};
|
||||
|
||||
export type GetHostsQueryResult = NonNullable<
|
||||
Awaited<ReturnType<typeof getHosts>>
|
||||
>;
|
||||
export type GetHostsQueryError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Get host info from Zeus.
|
||||
*/
|
||||
|
||||
export function useGetHosts<
|
||||
TData = Awaited<ReturnType<typeof getHosts>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<Awaited<ReturnType<typeof getHosts>>, TError, TData>;
|
||||
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getGetHostsQueryOptions(options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Get host info from Zeus.
|
||||
*/
|
||||
export const invalidateGetHosts = async (
|
||||
queryClient: QueryClient,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getGetHostsQueryKey() },
|
||||
options,
|
||||
);
|
||||
|
||||
  return queryClient;
};

/**
 * This endpoint saves the host of a deployment to zeus.
 * @summary Put host in Zeus for a deployment.
 */
export const putHost = (zeustypesPostableHostDTO: ZeustypesPostableHostDTO) => {
  return GeneratedAPIInstance<void>({
    url: `/api/v2/zeus/hosts`,
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    data: zeustypesPostableHostDTO,
  });
};

export const getPutHostMutationOptions = <
  TError = RenderErrorResponseDTO,
  TContext = unknown
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof putHost>>,
    TError,
    { data: ZeustypesPostableHostDTO },
    TContext
  >;
}): UseMutationOptions<
  Awaited<ReturnType<typeof putHost>>,
  TError,
  { data: ZeustypesPostableHostDTO },
  TContext
> => {
  const mutationKey = ['putHost'];
  const { mutation: mutationOptions } = options
    ? options.mutation &&
      'mutationKey' in options.mutation &&
      options.mutation.mutationKey
      ? options
      : { ...options, mutation: { ...options.mutation, mutationKey } }
    : { mutation: { mutationKey } };

  const mutationFn: MutationFunction<
    Awaited<ReturnType<typeof putHost>>,
    { data: ZeustypesPostableHostDTO }
  > = (props) => {
    const { data } = props ?? {};

    return putHost(data);
  };

  return { mutationFn, ...mutationOptions };
};

export type PutHostMutationResult = NonNullable<
  Awaited<ReturnType<typeof putHost>>
>;
export type PutHostMutationBody = ZeustypesPostableHostDTO;
export type PutHostMutationError = RenderErrorResponseDTO;

/**
 * @summary Put host in Zeus for a deployment.
 */
export const usePutHost = <
  TError = RenderErrorResponseDTO,
  TContext = unknown
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof putHost>>,
    TError,
    { data: ZeustypesPostableHostDTO },
    TContext
  >;
}): UseMutationResult<
  Awaited<ReturnType<typeof putHost>>,
  TError,
  { data: ZeustypesPostableHostDTO },
  TContext
> => {
  const mutationOptions = getPutHostMutationOptions(options);

  return useMutation(mutationOptions);
};
/**
 * This endpoint saves the profile of a deployment to zeus.
 * @summary Put profile in Zeus for a deployment.
 */
export const putProfile = (
  zeustypesPostableProfileDTO: ZeustypesPostableProfileDTO,
) => {
  return GeneratedAPIInstance<void>({
    url: `/api/v2/zeus/profiles`,
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    data: zeustypesPostableProfileDTO,
  });
};

export const getPutProfileMutationOptions = <
  TError = RenderErrorResponseDTO,
  TContext = unknown
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof putProfile>>,
    TError,
    { data: ZeustypesPostableProfileDTO },
    TContext
  >;
}): UseMutationOptions<
  Awaited<ReturnType<typeof putProfile>>,
  TError,
  { data: ZeustypesPostableProfileDTO },
  TContext
> => {
  const mutationKey = ['putProfile'];
  const { mutation: mutationOptions } = options
    ? options.mutation &&
      'mutationKey' in options.mutation &&
      options.mutation.mutationKey
      ? options
      : { ...options, mutation: { ...options.mutation, mutationKey } }
    : { mutation: { mutationKey } };

  const mutationFn: MutationFunction<
    Awaited<ReturnType<typeof putProfile>>,
    { data: ZeustypesPostableProfileDTO }
  > = (props) => {
    const { data } = props ?? {};

    return putProfile(data);
  };

  return { mutationFn, ...mutationOptions };
};

export type PutProfileMutationResult = NonNullable<
  Awaited<ReturnType<typeof putProfile>>
>;
export type PutProfileMutationBody = ZeustypesPostableProfileDTO;
export type PutProfileMutationError = RenderErrorResponseDTO;

/**
 * @summary Put profile in Zeus for a deployment.
 */
export const usePutProfile = <
  TError = RenderErrorResponseDTO,
  TContext = unknown
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof putProfile>>,
    TError,
    { data: ZeustypesPostableProfileDTO },
    TContext
  >;
}): UseMutationResult<
  Awaited<ReturnType<typeof putProfile>>,
  TError,
  { data: ZeustypesPostableProfileDTO },
  TContext
> => {
  const mutationOptions = getPutProfileMutationOptions(options);

  return useMutation(mutationOptions);
};
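
// Illustrative usage sketch (not part of the generated client): wiring the
// generated usePutHost hook to a save handler. The hook and the
// { data: { name } } payload shape mirror how CustomDomainSettings calls it
// later in this changeset; the helper name and `subdomain` argument below are
// hypothetical.
export function useSaveCustomDomainExample(): (subdomain: string) => void {
  const { mutate: putHostMutate } = usePutHost();

  return (subdomain: string): void => {
    putHostMutate(
      { data: { name: subdomain } },
      {
        onSuccess: () => {
          // e.g. start polling hosts until the deployment reports HEALTHY
        },
        onError: (error) => {
          console.error(error);
        },
      },
    );
  };
}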
@@ -15,15 +15,7 @@ import { Events } from 'constants/events';
import { LOCALSTORAGE } from 'constants/localStorage';
import { eventEmitter } from 'utils/getEventEmitter';

import apiV1, {
  apiAlertManager,
  apiV2,
  apiV3,
  apiV4,
  apiV5,
  gatewayApiV1,
  gatewayApiV2,
} from './apiV1';
import apiV1, { apiAlertManager, apiV2, apiV3, apiV4, apiV5 } from './apiV1';
import { Logout } from './utils';

const RESPONSE_TIMEOUT_THRESHOLD = 5000; // 5 seconds
@@ -211,24 +203,6 @@ LogEventAxiosInstance.interceptors.response.use(
LogEventAxiosInstance.interceptors.request.use(interceptorsRequestResponse);
//

// gateway Api V1
export const GatewayApiV1Instance = axios.create({
  baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV1}`,
});

GatewayApiV1Instance.interceptors.response.use(
  interceptorsResponse,
  interceptorRejected,
);

GatewayApiV1Instance.interceptors.request.use(interceptorsRequestResponse);
//

// gateway Api V2
export const GatewayApiV2Instance = axios.create({
  baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV2}`,
});

// generated API Instance
export const GeneratedAPIInstance = axios.create({
  baseURL: ENVIRONMENT.baseURL,
@@ -240,14 +214,6 @@ GeneratedAPIInstance.interceptors.response.use(
  interceptorRejected,
);

GatewayApiV2Instance.interceptors.response.use(
  interceptorsResponse,
  interceptorRejected,
);

GatewayApiV2Instance.interceptors.request.use(interceptorsRequestResponse);
//

AxiosAlertManagerInstance.interceptors.response.use(
  interceptorsResponse,
  interceptorRejected,

@@ -1,20 +0,0 @@
import { GatewayApiV2Instance } from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { UpdateProfileProps } from 'types/api/onboarding/types';

const updateProfile = async (
  props: UpdateProfileProps,
): Promise<SuccessResponse<UpdateProfileProps> | ErrorResponse> => {
  const response = await GatewayApiV2Instance.put('/profiles/me', {
    ...props,
  });

  return {
    statusCode: 200,
    error: null,
    message: response.data.status,
    payload: response.data.data,
  };
};

export default updateProfile;
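
// Note (assumption): the hand-written updateProfile helper deleted above appears
// to be superseded by the generated usePutProfile hook earlier in this changeset.
// A hypothetical call-site migration could look like the sketch below; the
// profile payload fields are not shown in this diff, so the DTO is passed through
// opaquely, and the schemas import path is assumed to match the one used for the
// host types.
import { ZeustypesPostableProfileDTO } from 'api/generated/services/sigNoz.schemas';
import { usePutProfile } from 'api/generated/services/zeus';

function useUpdateProfileExample(): (
  profile: ZeustypesPostableProfileDTO,
) => void {
  const { mutate: putProfileMutate } = usePutProfile();

  return (profile: ZeustypesPostableProfileDTO): void => {
    putProfileMutate({ data: profile });
  };
}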
@@ -42,6 +42,7 @@
  display: flex;
  flex-direction: column;
  padding: 16px;
  padding-bottom: 0;
}

.title {
@@ -190,7 +191,7 @@
  padding: 0px;
}
.log-detail-drawer__footer-hint {
  position: absolute;
  position: sticky;
  bottom: 0;
  left: 0;
  right: 0;
@@ -366,7 +367,7 @@
}

.log-detail-drawer__footer-hint {
  position: absolute;
  position: sticky;
  bottom: 0;
  left: 0;
  right: 0;

@@ -202,7 +202,7 @@ function AllEndPoints({
|
||||
|
||||
const onRowClick = useCallback(
|
||||
(props: any): void => {
|
||||
setSelectedEndPointName(props[SPAN_ATTRIBUTES.URL_PATH] as string);
|
||||
setSelectedEndPointName(props[SPAN_ATTRIBUTES.HTTP_URL] as string);
|
||||
setSelectedView(VIEWS.ENDPOINT_STATS);
|
||||
const initialItems = [
|
||||
...(filters?.items || []),
|
||||
@@ -213,7 +213,7 @@ function AllEndPoints({
|
||||
op: 'AND',
|
||||
});
|
||||
setParams({
|
||||
selectedEndPointName: props[SPAN_ATTRIBUTES.URL_PATH] as string,
|
||||
selectedEndPointName: props[SPAN_ATTRIBUTES.HTTP_URL] as string,
|
||||
selectedView: VIEWS.ENDPOINT_STATS,
|
||||
endPointDetailsLocalFilters: {
|
||||
items: initialItems,
|
||||
|
||||
@@ -33,7 +33,7 @@ import { SPAN_ATTRIBUTES } from './constants';
|
||||
|
||||
const httpUrlKey = {
|
||||
dataType: DataTypes.String,
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
type: 'tag',
|
||||
};
|
||||
|
||||
@@ -93,7 +93,7 @@ function EndPointDetails({
|
||||
return currentFilters; // No change needed, prevents loop
|
||||
}
|
||||
|
||||
// Rebuild filters: Keep non-http.url filters and add/update http.url filter based on prop
|
||||
// Rebuild filters: Keep non-http_url filters and add/update http_url filter based on prop
|
||||
const otherFilters = currentFilters?.items?.filter(
|
||||
(item) => item.key?.key !== httpUrlKey.key,
|
||||
);
|
||||
@@ -125,7 +125,7 @@ function EndPointDetails({
|
||||
(newFilters: IBuilderQuery['filters']): void => {
|
||||
// 1. Update local filters state immediately
|
||||
setFilters(newFilters);
|
||||
// Filter out http.url filter before saving to params
|
||||
// Filter out http_url filter before saving to params
|
||||
const filteredNewFilters = {
|
||||
op: 'AND',
|
||||
items:
|
||||
@@ -299,7 +299,6 @@ function EndPointDetails({
|
||||
endPointStatusCodeLatencyBarChartsDataQuery
|
||||
}
|
||||
domainName={domainName}
|
||||
endPointName={endPointName}
|
||||
filters={filters}
|
||||
timeRange={timeRange}
|
||||
onDragSelect={onDragSelect}
|
||||
|
||||
@@ -56,15 +56,15 @@ function TopErrors({
|
||||
{
|
||||
items: endPointName
|
||||
? [
|
||||
// Remove any existing http.url filters from initialFilters to avoid duplicates
|
||||
// Remove any existing http_url filters from initialFilters to avoid duplicates
|
||||
...(initialFilters?.items?.filter(
|
||||
(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
|
||||
(item) => item.key?.key !== SPAN_ATTRIBUTES.HTTP_URL,
|
||||
) || []),
|
||||
{
|
||||
id: '92b8a1c1',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
|
||||
@@ -9,6 +9,7 @@ import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { SPAN_ATTRIBUTES } from '../constants';
|
||||
import DomainMetrics from './DomainMetrics';
|
||||
|
||||
// Mock the API call
|
||||
@@ -126,11 +127,9 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
|
||||
'count()',
|
||||
);
|
||||
// Verify exact domain filter expression structure
|
||||
expect(queryA.filter.expression).toContain("http_host = '0.0.0.0'");
|
||||
expect(queryA.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
expect(queryA.filter.expression).toContain(
|
||||
'url.full EXISTS OR http.url EXISTS',
|
||||
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
|
||||
);
|
||||
|
||||
// Verify Query B - p99 latency
|
||||
@@ -142,17 +141,13 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
|
||||
'p99(duration_nano)',
|
||||
);
|
||||
// Verify exact domain filter expression structure
|
||||
expect(queryB.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
expect(queryB.filter.expression).toContain("http_host = '0.0.0.0'");
|
||||
|
||||
// Verify Query C - error count (disabled)
|
||||
const queryC = queryData.find((q: any) => q.queryName === 'C');
|
||||
expect(queryC).toBeDefined();
|
||||
expect(queryC.disabled).toBe(true);
|
||||
expect(queryC.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
expect(queryC.filter.expression).toContain("http_host = '0.0.0.0'");
|
||||
expect(queryC.aggregations?.[0]).toBeDefined();
|
||||
expect((queryC.aggregations?.[0] as TraceAggregation)?.expression).toBe(
|
||||
'count()',
|
||||
@@ -169,9 +164,7 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
|
||||
'max(timestamp)',
|
||||
);
|
||||
// Verify exact domain filter expression structure
|
||||
expect(queryD.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
expect(queryD.filter.expression).toContain("http_host = '0.0.0.0'");
|
||||
|
||||
// Verify Formula F1 - error rate calculation
|
||||
const formulas = payload.query.builder.queryFormulas;
|
||||
|
||||
@@ -153,7 +153,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
|
||||
// Verify exact domain filter expression structure
|
||||
if (queryA.filter) {
|
||||
expect(queryA.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
`http_host = 'api.example.com'`,
|
||||
);
|
||||
expect(queryA.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
@@ -171,7 +171,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
|
||||
// Verify exact domain filter expression structure
|
||||
if (queryB.filter) {
|
||||
expect(queryB.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
`http_host = 'api.example.com'`,
|
||||
);
|
||||
expect(queryB.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
@@ -185,7 +185,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
|
||||
expect(queryC.aggregateOperator).toBe('count');
|
||||
if (queryC.filter) {
|
||||
expect(queryC.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
`http_host = 'api.example.com'`,
|
||||
);
|
||||
expect(queryC.filter.expression).toContain("kind_string = 'Client'");
|
||||
expect(queryC.filter.expression).toContain('has_error = true');
|
||||
@@ -204,7 +204,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
|
||||
// Verify exact domain filter expression structure
|
||||
if (queryD.filter) {
|
||||
expect(queryD.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
`http_host = 'api.example.com'`,
|
||||
);
|
||||
expect(queryD.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
@@ -221,7 +221,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
|
||||
}
|
||||
if (queryE.filter) {
|
||||
expect(queryE.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
`http_host = 'api.example.com'`,
|
||||
);
|
||||
expect(queryE.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
@@ -291,7 +291,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
|
||||
expect(query.filter.expression).toContain('staging');
|
||||
// Also verify domain filter is still present
|
||||
expect(query.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.internal.com' OR server.address = 'api.internal.com')",
|
||||
"http_host = 'api.internal.com'",
|
||||
);
|
||||
// Verify client kind filter is present
|
||||
expect(query.filter.expression).toContain("kind_string = 'Client'");
|
||||
|
||||
@@ -34,7 +34,6 @@ function StatusCodeBarCharts({
|
||||
endPointStatusCodeBarChartsDataQuery,
|
||||
endPointStatusCodeLatencyBarChartsDataQuery,
|
||||
domainName,
|
||||
endPointName,
|
||||
filters,
|
||||
timeRange,
|
||||
onDragSelect,
|
||||
@@ -48,7 +47,6 @@ function StatusCodeBarCharts({
|
||||
unknown
|
||||
>;
|
||||
domainName: string;
|
||||
endPointName: string;
|
||||
filters: IBuilderQuery['filters'];
|
||||
timeRange: {
|
||||
startTime: number;
|
||||
@@ -144,11 +142,11 @@ function StatusCodeBarCharts({
|
||||
|
||||
const widget = useMemo<Widgets>(
|
||||
() =>
|
||||
getStatusCodeBarChartWidgetData(domainName, endPointName, {
|
||||
getStatusCodeBarChartWidgetData(domainName, {
|
||||
items: [...(filters?.items || [])],
|
||||
op: filters?.op || 'AND',
|
||||
}),
|
||||
[domainName, endPointName, filters],
|
||||
[domainName, filters],
|
||||
);
|
||||
|
||||
const graphClickHandler = useCallback(
|
||||
@@ -166,6 +164,7 @@ function StatusCodeBarCharts({
|
||||
xValue,
|
||||
TWO_AND_HALF_MINUTES_IN_MILLISECONDS,
|
||||
);
|
||||
|
||||
handleGraphClick({
|
||||
xValue,
|
||||
yValue,
|
||||
|
||||
@@ -12,7 +12,7 @@ export const VIEW_TYPES = {
|
||||
|
||||
// Span attribute keys - these are the source of truth for all attribute keys
|
||||
export const SPAN_ATTRIBUTES = {
|
||||
URL_PATH: 'http.url',
|
||||
HTTP_URL: 'http_url',
|
||||
RESPONSE_STATUS_CODE: 'response_status_code',
|
||||
SERVER_NAME: 'http_host',
|
||||
SERVER_PORT: 'net.peer.port',
|
||||
|
||||
@@ -280,7 +280,7 @@ describe('API Monitoring Utils', () => {
|
||||
const endpointFilter = result?.items?.find(
|
||||
(item) =>
|
||||
item.key &&
|
||||
item.key.key === SPAN_ATTRIBUTES.URL_PATH &&
|
||||
item.key.key === SPAN_ATTRIBUTES.HTTP_URL &&
|
||||
item.value === endPointName,
|
||||
);
|
||||
expect(endpointFilter).toBeDefined();
|
||||
@@ -344,13 +344,12 @@ describe('API Monitoring Utils', () => {
|
||||
describe('getFormattedEndPointDropDownData', () => {
|
||||
it('should format endpoint dropdown data correctly', () => {
|
||||
// Arrange
|
||||
const URL_PATH_KEY = SPAN_ATTRIBUTES.URL_PATH;
|
||||
const URL_PATH_KEY = SPAN_ATTRIBUTES.HTTP_URL;
|
||||
const mockData = [
|
||||
{
|
||||
data: {
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
[URL_PATH_KEY]: '/api/users',
|
||||
'url.full': 'http://example.com/api/users',
|
||||
A: 150, // count or other metric
|
||||
},
|
||||
},
|
||||
@@ -358,7 +357,6 @@ describe('API Monitoring Utils', () => {
|
||||
data: {
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
[URL_PATH_KEY]: '/api/orders',
|
||||
'url.full': 'http://example.com/api/orders',
|
||||
A: 75,
|
||||
},
|
||||
},
|
||||
@@ -406,7 +404,7 @@ describe('API Monitoring Utils', () => {
|
||||
|
||||
it('should handle items without URL path', () => {
|
||||
// Arrange
|
||||
const URL_PATH_KEY = SPAN_ATTRIBUTES.URL_PATH;
|
||||
const URL_PATH_KEY = SPAN_ATTRIBUTES.HTTP_URL;
|
||||
type MockDataType = {
|
||||
data: {
|
||||
[key: string]: string | number;
|
||||
@@ -712,13 +710,11 @@ describe('API Monitoring Utils', () => {
|
||||
it('should generate widget configuration for status code bar chart', () => {
|
||||
// Arrange
|
||||
const domainName = 'test-domain';
|
||||
const endPointName = '/api/test';
|
||||
const filters = { items: [], op: 'AND' };
|
||||
|
||||
// Act
|
||||
const result = getStatusCodeBarChartWidgetData(
|
||||
domainName,
|
||||
endPointName,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
@@ -741,21 +737,11 @@ describe('API Monitoring Utils', () => {
|
||||
if (domainFilter) {
|
||||
expect(domainFilter.value).toBe(domainName);
|
||||
}
|
||||
|
||||
// Should have endpoint filter if provided
|
||||
const endpointFilter = queryData.filters?.items?.find(
|
||||
(item) => item.key && item.key.key === SPAN_ATTRIBUTES.URL_PATH,
|
||||
);
|
||||
expect(endpointFilter).toBeDefined();
|
||||
if (endpointFilter) {
|
||||
expect(endpointFilter.value).toBe(endPointName);
|
||||
}
|
||||
});
|
||||
|
||||
it('should include custom filters in the widget configuration', () => {
|
||||
// Arrange
|
||||
const domainName = 'test-domain';
|
||||
const endPointName = '/api/test';
|
||||
const customFilter = {
|
||||
id: 'custom-filter',
|
||||
key: {
|
||||
@@ -771,7 +757,6 @@ describe('API Monitoring Utils', () => {
|
||||
// Act
|
||||
const result = getStatusCodeBarChartWidgetData(
|
||||
domainName,
|
||||
endPointName,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ jest.mock('container/GridCardLayout/GridCard', () => ({
|
||||
type="button"
|
||||
data-testid="row-click-button"
|
||||
onClick={(): void =>
|
||||
customOnRowClick({ [SPAN_ATTRIBUTES.URL_PATH]: '/api/test' })
|
||||
customOnRowClick({ [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test' })
|
||||
}
|
||||
>
|
||||
Click Row
|
||||
|
||||
@@ -6,10 +6,10 @@
|
||||
* These tests validate the migration from V4 to V5 format for getAllEndpointsWidgetData:
|
||||
* - Filter format change: filters.items[] → filter.expression
|
||||
* - Aggregation format: aggregateAttribute → aggregations[] array
|
||||
* - Domain filter: (net.peer.name OR server.address)
|
||||
* - Domain filter: http_host = '${domainName}'
|
||||
* - Kind filter: kind_string = 'Client'
|
||||
* - Four queries: A (count), B (p99 latency), C (max timestamp), D (error count - disabled)
|
||||
* - GroupBy: Both http.url AND url.full with type 'attribute'
|
||||
* - GroupBy: http_url with type 'attribute'
|
||||
*/
|
||||
import { getAllEndpointsWidgetData } from 'container/ApiMonitoring/utils';
|
||||
import {
|
||||
@@ -18,6 +18,8 @@ import {
|
||||
} from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
|
||||
|
||||
describe('AllEndpointsWidget - V5 Migration Validation', () => {
|
||||
const mockDomainName = 'api.example.com';
|
||||
const emptyFilters: IBuilderQuery['filters'] = {
|
||||
@@ -92,28 +94,28 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
|
||||
|
||||
const [queryA, queryB, queryC, queryD] = widget.query.builder.queryData;
|
||||
|
||||
const baseExpression = `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') AND kind_string = 'Client'`;
|
||||
const baseExpression = `http_host = '${mockDomainName}' AND kind_string = 'Client'`;
|
||||
|
||||
// Queries A, B, C have identical base filter
|
||||
expect(queryA.filter?.expression).toBe(
|
||||
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
|
||||
);
|
||||
expect(queryB.filter?.expression).toBe(
|
||||
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
|
||||
);
|
||||
expect(queryC.filter?.expression).toBe(
|
||||
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
|
||||
);
|
||||
|
||||
// Query D has additional has_error filter
|
||||
expect(queryD.filter?.expression).toBe(
|
||||
`${baseExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
`${baseExpression} AND has_error = true AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('2. GroupBy Structure', () => {
|
||||
it('default groupBy includes both http.url and url.full with type attribute', () => {
|
||||
it(`default groupBy includes ${SPAN_ATTRIBUTES.HTTP_URL} with type attribute`, () => {
|
||||
const widget = getAllEndpointsWidgetData(
|
||||
emptyGroupBy,
|
||||
mockDomainName,
|
||||
@@ -124,23 +126,13 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
|
||||
|
||||
// All queries should have the same default groupBy
|
||||
queryData.forEach((query) => {
|
||||
expect(query.groupBy).toHaveLength(2);
|
||||
expect(query.groupBy).toHaveLength(1);
|
||||
|
||||
// http.url
|
||||
expect(query.groupBy).toContainEqual({
|
||||
dataType: DataTypes.String,
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'http.url',
|
||||
type: 'attribute',
|
||||
});
|
||||
|
||||
// url.full
|
||||
expect(query.groupBy).toContainEqual({
|
||||
dataType: DataTypes.String,
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'url.full',
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
type: 'attribute',
|
||||
});
|
||||
});
|
||||
@@ -170,19 +162,18 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
|
||||
|
||||
// All queries should have defaults + custom groupBy
|
||||
queryData.forEach((query) => {
|
||||
expect(query.groupBy).toHaveLength(4); // 2 defaults + 2 custom
|
||||
expect(query.groupBy).toHaveLength(3); // 1 default + 2 custom
|
||||
|
||||
// First two should be defaults (http.url, url.full)
|
||||
expect(query.groupBy[0].key).toBe('http.url');
|
||||
expect(query.groupBy[1].key).toBe('url.full');
|
||||
// First two should be defaults (http_url)
|
||||
expect(query.groupBy[0].key).toBe(SPAN_ATTRIBUTES.HTTP_URL);
|
||||
|
||||
// Last two should be custom (matching subset of properties)
|
||||
expect(query.groupBy[2]).toMatchObject({
|
||||
expect(query.groupBy[1]).toMatchObject({
|
||||
dataType: DataTypes.String,
|
||||
key: 'service.name',
|
||||
type: 'resource',
|
||||
});
|
||||
expect(query.groupBy[3]).toMatchObject({
|
||||
expect(query.groupBy[2]).toMatchObject({
|
||||
dataType: DataTypes.String,
|
||||
key: 'deployment.environment',
|
||||
type: 'resource',
|
||||
|
||||
@@ -258,7 +258,7 @@ describe('EndPointDetails Component', () => {
|
||||
expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
|
||||
value: '/api/test',
|
||||
}),
|
||||
]),
|
||||
@@ -278,7 +278,7 @@ describe('EndPointDetails Component', () => {
|
||||
expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
|
||||
value: '/api/test',
|
||||
}),
|
||||
]),
|
||||
@@ -360,7 +360,7 @@ describe('EndPointDetails Component', () => {
|
||||
expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
|
||||
value: '/api/test',
|
||||
}),
|
||||
]),
|
||||
@@ -373,7 +373,7 @@ describe('EndPointDetails Component', () => {
|
||||
expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
|
||||
value: '/api/test',
|
||||
}),
|
||||
]),
|
||||
|
||||
@@ -191,7 +191,7 @@ describe('EndPointsDropDown Component', () => {
|
||||
|
||||
it('formats data using the utility function', () => {
|
||||
const mockRows = [
|
||||
{ data: { [SPAN_ATTRIBUTES.URL_PATH]: '/api/test', A: 10 } },
|
||||
{ data: { [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test', A: 10 } },
|
||||
];
|
||||
|
||||
const dataProps = {
|
||||
|
||||
@@ -6,15 +6,18 @@
|
||||
* These tests validate the migration from V4 to V5 format for the third payload
|
||||
* in getEndPointDetailsQueryPayload (endpoint dropdown data):
|
||||
* - Filter format change: filters.items[] → filter.expression
|
||||
* - Domain handling: (net.peer.name OR server.address)
|
||||
* - Domain handling: http_host = '${domainName}'
|
||||
* - Kind filter: kind_string = 'Client'
|
||||
* - Existence check: (http.url EXISTS OR url.full EXISTS)
|
||||
* - Existence check: http_url EXISTS
|
||||
* - Aggregation: count() expression
|
||||
* - GroupBy: Both http.url AND url.full with type 'attribute'
|
||||
* - GroupBy: http_url with type 'attribute'
|
||||
*/
|
||||
import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
|
||||
|
||||
describe('EndpointDropdown - V5 Migration Validation', () => {
|
||||
const mockDomainName = 'api.example.com';
|
||||
const mockStartTime = 1000;
|
||||
@@ -43,9 +46,9 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
|
||||
expect(typeof queryA.filter?.expression).toBe('string');
|
||||
expect(queryA).not.toHaveProperty('filters');
|
||||
|
||||
// Base filter 1: Domain (net.peer.name OR server.address)
|
||||
// Base filter 1: Domain http_host = '${domainName}'
|
||||
expect(queryA.filter?.expression).toContain(
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
|
||||
`http_host = '${mockDomainName}'`,
|
||||
);
|
||||
|
||||
// Base filter 2: Kind
|
||||
@@ -53,7 +56,7 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
|
||||
|
||||
// Base filter 3: Existence check
|
||||
expect(queryA.filter?.expression).toContain(
|
||||
'(http.url EXISTS OR url.full EXISTS)',
|
||||
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
|
||||
);
|
||||
|
||||
// V5 Aggregation format: aggregations array (not aggregateAttribute)
|
||||
@@ -64,16 +67,11 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
|
||||
});
|
||||
expect(queryA).not.toHaveProperty('aggregateAttribute');
|
||||
|
||||
// GroupBy: Both http.url and url.full
|
||||
expect(queryA.groupBy).toHaveLength(2);
|
||||
// GroupBy: http_url
|
||||
expect(queryA.groupBy).toHaveLength(1);
|
||||
expect(queryA.groupBy).toContainEqual({
|
||||
key: 'http.url',
|
||||
dataType: 'string',
|
||||
type: 'attribute',
|
||||
});
|
||||
expect(queryA.groupBy).toContainEqual({
|
||||
key: 'url.full',
|
||||
dataType: 'string',
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
dataType: DataTypes.String,
|
||||
type: 'attribute',
|
||||
});
|
||||
});
|
||||
@@ -120,53 +118,7 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
|
||||
|
||||
// Exact filter expression with custom filters merged
|
||||
expect(expression).toBe(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND deployment.environment = 'production'",
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('3. HTTP URL Filter Special Handling', () => {
|
||||
it('converts http.url filter to (http.url OR url.full) expression', () => {
|
||||
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
|
||||
items: [
|
||||
{
|
||||
id: 'http-url-filter',
|
||||
key: {
|
||||
key: 'http.url',
|
||||
dataType: 'string' as any,
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: '/api/users',
|
||||
},
|
||||
{
|
||||
id: 'service-filter',
|
||||
key: {
|
||||
key: 'service.name',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'user-service',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const payload = getEndPointDetailsQueryPayload(
|
||||
mockDomainName,
|
||||
mockStartTime,
|
||||
mockEndTime,
|
||||
filtersWithHttpUrl,
|
||||
);
|
||||
|
||||
const dropdownQuery = payload[2];
|
||||
const expression =
|
||||
dropdownQuery.query.builder.queryData[0].filter?.expression;
|
||||
|
||||
// CRITICAL: Exact filter expression with http.url converted to OR logic
|
||||
expect(expression).toBe(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND (http.url = '/api/users' OR url.full = '/api/users')",
|
||||
`${SPAN_ATTRIBUTES.SERVER_NAME} = 'api.example.com' AND kind_string = 'Client' AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS service.name = 'user-service' AND deployment.environment = 'production'`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -33,7 +33,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
|
||||
expect(queryData).not.toHaveProperty('filters.items');
|
||||
});
|
||||
|
||||
it('uses new domain filter format: (net.peer.name OR server.address)', () => {
|
||||
it('uses new domain filter format: (http_host)', () => {
|
||||
const widget = getRateOverTimeWidgetData(
|
||||
mockDomainName,
|
||||
mockEndpointName,
|
||||
@@ -44,7 +44,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
|
||||
|
||||
// Verify EXACT new filter format with OR operator
|
||||
expect(queryData?.filter?.expression).toContain(
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
|
||||
`http_host = '${mockDomainName}'`,
|
||||
);
|
||||
|
||||
// Endpoint name is used in legend, not filter
|
||||
@@ -90,7 +90,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
|
||||
|
||||
// Verify domain filter is present
|
||||
expect(queryData?.filter?.expression).toContain(
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
|
||||
`http_host = '${mockDomainName}'`,
|
||||
);
|
||||
|
||||
// Verify custom filters are merged into the expression
|
||||
@@ -120,7 +120,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
|
||||
expect(queryData).not.toHaveProperty('filters.items');
|
||||
});
|
||||
|
||||
it('uses new domain filter format: (net.peer.name OR server.address)', () => {
|
||||
it('uses new domain filter format: (http_host)', () => {
|
||||
const widget = getLatencyOverTimeWidgetData(
|
||||
mockDomainName,
|
||||
mockEndpointName,
|
||||
@@ -132,7 +132,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
|
||||
// Verify EXACT new filter format with OR operator
|
||||
expect(queryData.filter).toBeDefined();
|
||||
expect(queryData?.filter?.expression).toContain(
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
|
||||
`http_host = '${mockDomainName}'`,
|
||||
);
|
||||
|
||||
// Endpoint name is used in legend, not filter
|
||||
@@ -166,7 +166,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
|
||||
|
||||
// Verify domain filter is present
|
||||
expect(queryData?.filter?.expression).toContain(
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') service.name = 'user-service'`,
|
||||
`http_host = '${mockDomainName}' service.name = 'user-service'`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -142,7 +142,6 @@ describe('StatusCodeBarCharts', () => {
|
||||
endTime: 1609545600000,
|
||||
};
|
||||
const mockDomainName = 'test-domain';
|
||||
const mockEndPointName = '/api/test';
|
||||
const onDragSelectMock = jest.fn();
|
||||
const refetchFn = jest.fn();
|
||||
|
||||
@@ -232,7 +231,6 @@ describe('StatusCodeBarCharts', () => {
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
@@ -268,7 +266,6 @@ describe('StatusCodeBarCharts', () => {
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
@@ -311,7 +308,6 @@ describe('StatusCodeBarCharts', () => {
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
@@ -356,7 +352,6 @@ describe('StatusCodeBarCharts', () => {
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
@@ -404,7 +399,6 @@ describe('StatusCodeBarCharts', () => {
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
@@ -419,7 +413,6 @@ describe('StatusCodeBarCharts', () => {
|
||||
// but we've confirmed the function is mocked and ready to be tested
|
||||
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
|
||||
mockDomainName,
|
||||
mockEndPointName,
|
||||
expect.objectContaining({
|
||||
items: [],
|
||||
op: 'AND',
|
||||
@@ -467,7 +460,6 @@ describe('StatusCodeBarCharts', () => {
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockCustomFilters as IBuilderQuery['filters']}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
@@ -477,7 +469,6 @@ describe('StatusCodeBarCharts', () => {
|
||||
// Assert widget creation was called with the correct parameters
|
||||
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
|
||||
mockDomainName,
|
||||
mockEndPointName,
|
||||
expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({ id: 'custom-filter' }),
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
*
|
||||
* V5 Changes:
|
||||
* - Filter format change: filters.items[] → filter.expression
|
||||
* - Domain filter: (net.peer.name OR server.address)
|
||||
* - Domain filter: (http_host)
|
||||
* - Kind filter: kind_string = 'Client'
|
||||
* - stepInterval: 60 → null
|
||||
* - Grouped by response_status_code
|
||||
@@ -47,9 +47,9 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
|
||||
expect(typeof queryA.filter?.expression).toBe('string');
|
||||
expect(queryA).not.toHaveProperty('filters.items');
|
||||
|
||||
// Base filter 1: Domain (net.peer.name OR server.address)
|
||||
// Base filter 1: Domain (http_host)
|
||||
expect(queryA.filter?.expression).toContain(
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
|
||||
`http_host = '${mockDomainName}'`,
|
||||
);
|
||||
|
||||
// Base filter 2: Kind
|
||||
@@ -96,9 +96,9 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
|
||||
expect(typeof queryA.filter?.expression).toBe('string');
|
||||
expect(queryA).not.toHaveProperty('filters.items');
|
||||
|
||||
// Base filter 1: Domain (net.peer.name OR server.address)
|
||||
// Base filter 1: Domain (http_host)
|
||||
expect(queryA.filter?.expression).toContain(
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
|
||||
`http_host = '${mockDomainName}'`,
|
||||
);
|
||||
|
||||
// Base filter 2: Kind
|
||||
@@ -177,7 +177,7 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
|
||||
expect(callsExpression).toBe(latencyExpression);
|
||||
|
||||
// Verify base filters
|
||||
expect(callsExpression).toContain('net.peer.name');
|
||||
expect(callsExpression).toContain('http_host');
|
||||
expect(callsExpression).toContain("kind_string = 'Client'");
|
||||
|
||||
// Verify custom filters are merged
|
||||
@@ -187,51 +187,4 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
|
||||
expect(callsExpression).toContain('production');
|
||||
});
|
||||
});
|
||||
|
||||
describe('4. HTTP URL Filter Handling', () => {
|
||||
it('converts http.url filter to (http.url OR url.full) expression in both charts', () => {
|
||||
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
|
||||
items: [
|
||||
{
|
||||
id: 'http-url-filter',
|
||||
key: {
|
||||
key: 'http.url',
|
||||
dataType: 'string' as any,
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: '/api/metrics',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const payload = getEndPointDetailsQueryPayload(
|
||||
mockDomainName,
|
||||
mockStartTime,
|
||||
mockEndTime,
|
||||
filtersWithHttpUrl,
|
||||
);
|
||||
|
||||
const callsChartQuery = payload[4];
|
||||
const latencyChartQuery = payload[5];
|
||||
|
||||
const callsExpression =
|
||||
callsChartQuery.query.builder.queryData[0].filter?.expression;
|
||||
const latencyExpression =
|
||||
latencyChartQuery.query.builder.queryData[0].filter?.expression;
|
||||
|
||||
// CRITICAL: http.url converted to OR logic
|
||||
expect(callsExpression).toContain(
|
||||
"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
|
||||
);
|
||||
expect(latencyExpression).toContain(
|
||||
"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
|
||||
);
|
||||
|
||||
// Base filters still present
|
||||
expect(callsExpression).toContain('net.peer.name');
|
||||
expect(callsExpression).toContain("kind_string = 'Client'");
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -6,8 +6,8 @@
|
||||
* These tests validate the migration from V4 to V5 format for the second payload
|
||||
* in getEndPointDetailsQueryPayload (status code table data):
|
||||
* - Filter format change: filters.items[] → filter.expression
|
||||
* - URL handling: Special logic for (http.url OR url.full)
|
||||
* - Domain filter: (net.peer.name OR server.address)
|
||||
* - URL handling: Special logic for http_url
|
||||
* - Domain filter: http_host = '${domainName}'
|
||||
* - Kind filter: kind_string = 'Client'
|
||||
* - Kind filter: response_status_code EXISTS
|
||||
* - Three queries: A (count), B (p99 latency), C (rate)
|
||||
@@ -45,9 +45,9 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
|
||||
expect(typeof queryA.filter?.expression).toBe('string');
|
||||
expect(queryA).not.toHaveProperty('filters.items');
|
||||
|
||||
// Base filter 1: Domain (net.peer.name OR server.address)
|
||||
// Base filter 1: Domain (http_host)
|
||||
expect(queryA.filter?.expression).toContain(
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
|
||||
`http_host = '${mockDomainName}'`,
|
||||
);
|
||||
|
||||
// Base filter 2: Kind
|
||||
@@ -149,7 +149,7 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
|
||||
statusCodeQuery.query.builder.queryData[0].filter?.expression;
|
||||
|
||||
// Base filters present
|
||||
expect(expression).toContain('net.peer.name');
|
||||
expect(expression).toContain('http_host');
|
||||
expect(expression).toContain("kind_string = 'Client'");
|
||||
expect(expression).toContain('response_status_code EXISTS');
|
||||
|
||||
@@ -165,62 +165,4 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
|
||||
expect(queries[1].filter?.expression).toBe(queries[2].filter?.expression);
|
||||
});
|
||||
});
|
||||
|
||||
describe('4. HTTP URL Filter Handling', () => {
|
||||
it('converts http.url filter to (http.url OR url.full) expression', () => {
|
||||
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
|
||||
items: [
|
||||
{
|
||||
id: 'http-url-filter',
|
||||
key: {
|
||||
key: 'http.url',
|
||||
dataType: 'string' as any,
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: '/api/users',
|
||||
},
|
||||
{
|
||||
id: 'service-filter',
|
||||
key: {
|
||||
key: 'service.name',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'user-service',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const payload = getEndPointDetailsQueryPayload(
|
||||
mockDomainName,
|
||||
mockStartTime,
|
||||
mockEndTime,
|
||||
filtersWithHttpUrl,
|
||||
);
|
||||
|
||||
const statusCodeQuery = payload[1];
|
||||
const expression =
|
||||
statusCodeQuery.query.builder.queryData[0].filter?.expression;
|
||||
|
||||
// CRITICAL: http.url converted to OR logic
|
||||
expect(expression).toContain(
|
||||
"(http.url = '/api/users' OR url.full = '/api/users')",
|
||||
);
|
||||
|
||||
// Other filters still present
|
||||
expect(expression).toContain('service.name');
|
||||
expect(expression).toContain('user-service');
|
||||
|
||||
// Base filters present
|
||||
expect(expression).toContain('net.peer.name');
|
||||
expect(expression).toContain("kind_string = 'Client'");
|
||||
expect(expression).toContain('response_status_code EXISTS');
|
||||
|
||||
// All ANDed together (at least 2 ANDs: domain+kind, custom filter, url condition)
|
||||
expect(expression?.match(/AND/g)?.length).toBeGreaterThanOrEqual(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -84,7 +84,7 @@ describe('TopErrors', () => {
|
||||
{
|
||||
columns: [
|
||||
{
|
||||
name: 'http.url',
|
||||
name: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
fieldDataType: 'string',
|
||||
fieldContext: 'attribute',
|
||||
},
|
||||
@@ -124,7 +124,7 @@ describe('TopErrors', () => {
|
||||
table: {
|
||||
rows: [
|
||||
{
|
||||
'http.url': '/api/test',
|
||||
http_url: '/api/test',
|
||||
A: 100,
|
||||
},
|
||||
],
|
||||
@@ -206,7 +206,7 @@ describe('TopErrors', () => {
|
||||
expect(navigateMock).toHaveBeenCalledWith({
|
||||
filters: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: expect.objectContaining({ key: 'http.url' }),
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
|
||||
op: '=',
|
||||
value: '/api/test',
|
||||
}),
|
||||
@@ -335,7 +335,7 @@ describe('TopErrors', () => {
|
||||
|
||||
// Verify all required filters are present
|
||||
expect(filterExpression).toContain(
|
||||
`kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) AND (net.peer.name = 'test-domain' OR server.address = 'test-domain') AND has_error = true`,
|
||||
`kind_string = 'Client' AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS AND ${SPAN_ATTRIBUTES.SERVER_NAME} = 'test-domain' AND has_error = true`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -15,7 +15,6 @@ import { getWidgetQueryBuilder } from 'container/MetricsApplication/MetricsAppli
import { convertNanoToMilliseconds } from 'container/MetricsExplorer/Summary/utils';
import dayjs from 'dayjs';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { cloneDeep } from 'lodash-es';
import { ArrowUpDown, ChevronDown, ChevronRight, Info } from 'lucide-react';
import { getWidgetQuery } from 'pages/MessagingQueues/MQDetails/MetricPage/MetricPageUtil';
@@ -57,12 +56,12 @@ export const getDisplayValue = (value: unknown): string =>
  isEmptyFilterValue(value) ? '-' : String(value);

export const getDomainNameFilterExpression = (domainName: string): string =>
  `(net.peer.name = '${domainName}' OR server.address = '${domainName}')`;
  `http_host = '${domainName}'`;

export const clientKindExpression = `kind_string = 'Client'`;

/**
 * Converts filters to expression, handling http.url specially by creating (http.url OR url.full) condition
 * Converts filters to expression
 * @param filters Filters to convert
 * @param baseExpression Base expression to combine with filters
 * @returns Filter expression string
@@ -75,34 +74,6 @@ export const convertFiltersWithUrlHandling = (
    return baseExpression;
  }

  // Check if filters contain http.url (SPAN_ATTRIBUTES.URL_PATH)
  const httpUrlFilter = filters.items?.find(
    (item) => item.key?.key === SPAN_ATTRIBUTES.URL_PATH,
  );

  // If http.url filter exists, create modified filters with (http.url OR url.full)
  if (httpUrlFilter && httpUrlFilter.value) {
    // Remove ALL http.url filters from items (guards against duplicates)
    const otherFilters = filters.items?.filter(
      (item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
    );

    // Convert to expression first with other filters
    const {
      filter: intermediateFilter,
    } = convertFiltersToExpressionWithExistingQuery(
      { ...filters, items: otherFilters || [] },
      baseExpression,
    );

    // Add the OR condition for http.url and url.full
    const urlValue = httpUrlFilter.value;
    const urlCondition = `(http.url = '${urlValue}' OR url.full = '${urlValue}')`;
    return intermediateFilter.expression.trim()
      ? `${intermediateFilter.expression} AND ${urlCondition}`
      : urlCondition;
  }

  const { filter } = convertFiltersToExpressionWithExistingQuery(
    filters,
    baseExpression,
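
// Worked example (illustrative, assumes the helpers and SPAN_ATTRIBUTES constants
// from this utils file are in scope): with the simplified helpers above, the base
// expression is built from the flat http_host / http_url columns instead of the
// old (net.peer.name OR server.address) and (http.url OR url.full) pairs.
const exampleBaseExpression = `${getDomainNameFilterExpression(
  'api.example.com',
)} AND ${clientKindExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`;
// => "http_host = 'api.example.com' AND kind_string = 'Client' AND http_url EXISTS"
// which is exactly the expression asserted in the V5 migration tests in this changeset.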
@@ -371,7 +342,7 @@ export const formatDataForTable = (
  });
};

const urlExpression = `(url.full EXISTS OR http.url EXISTS)`;
const urlExpression = `${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`;

export const getDomainMetricsQueryPayload = (
|
||||
domainName: string,
|
||||
@@ -588,14 +559,7 @@ const defaultGroupBy = [
|
||||
dataType: DataTypes.String,
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
type: 'attribute',
|
||||
},
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'url.full',
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
type: 'attribute',
|
||||
},
|
||||
// {
|
||||
@@ -867,8 +831,8 @@ function buildFilterExpression(
|
||||
): string {
|
||||
const baseFilterParts = [
|
||||
`kind_string = 'Client'`,
|
||||
`(http.url EXISTS OR url.full EXISTS)`,
|
||||
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
|
||||
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
|
||||
`${SPAN_ATTRIBUTES.SERVER_NAME} = '${domainName}'`,
|
||||
`has_error = true`,
|
||||
];
|
||||
if (showStatusCodeErrors) {
|
||||
@@ -910,12 +874,7 @@ export const getTopErrorsQueryPayload = (
|
||||
filter: { expression: filterExpression },
|
||||
groupBy: [
|
||||
{
|
||||
name: 'http.url',
|
||||
fieldDataType: 'string',
|
||||
fieldContext: 'attribute',
|
||||
},
|
||||
{
|
||||
name: 'url.full',
|
||||
name: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
fieldDataType: 'string',
|
||||
fieldContext: 'attribute',
|
||||
},
|
||||
@@ -1134,11 +1093,11 @@ export const formatEndPointsDataForTable = (
|
||||
if (!isGroupedByAttribute) {
|
||||
formattedData = data?.map((endpoint) => {
|
||||
const { port } = extractPortAndEndpoint(
|
||||
(endpoint.data[SPAN_ATTRIBUTES.URL_PATH] as string) || '',
|
||||
(endpoint.data[SPAN_ATTRIBUTES.HTTP_URL] as string) || '',
|
||||
);
|
||||
return {
|
||||
key: v4(),
|
||||
endpointName: (endpoint.data[SPAN_ATTRIBUTES.URL_PATH] as string) || '-',
|
||||
endpointName: (endpoint.data[SPAN_ATTRIBUTES.HTTP_URL] as string) || '-',
|
||||
port,
|
||||
callCount:
|
||||
endpoint.data.A === 'n/a' || endpoint.data.A === undefined
|
||||
@@ -1262,9 +1221,7 @@ export const formatTopErrorsDataForTable = (
|
||||
|
||||
return {
|
||||
key: v4(),
|
||||
endpointName: getDisplayValue(
|
||||
rowObj[SPAN_ATTRIBUTES.URL_PATH] || rowObj['url.full'],
|
||||
),
|
||||
endpointName: getDisplayValue(rowObj[SPAN_ATTRIBUTES.HTTP_URL]),
|
||||
statusCode: getDisplayValue(rowObj[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE]),
|
||||
statusMessage: getDisplayValue(rowObj.status_message),
|
||||
count: getDisplayValue(rowObj.__result_0),
|
||||
@@ -1281,10 +1238,10 @@ export const getTopErrorsCoRelationQueryFilters = (
|
||||
{
|
||||
id: 'ea16470b',
|
||||
key: {
|
||||
key: 'http.url',
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
dataType: DataTypes.String,
|
||||
type: 'tag',
|
||||
id: 'http.url--string--tag--false',
|
||||
id: `${SPAN_ATTRIBUTES.HTTP_URL}--string--tag--false`,
|
||||
},
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
@@ -1781,7 +1738,7 @@ export const getEndPointDetailsQueryPayload = (
|
||||
filters || { items: [], op: 'AND' },
|
||||
`${getDomainNameFilterExpression(
|
||||
domainName,
|
||||
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
)} AND ${clientKindExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
|
||||
),
|
||||
},
|
||||
expression: 'A',
|
||||
@@ -1793,12 +1750,7 @@ export const getEndPointDetailsQueryPayload = (
|
||||
orderBy: [],
|
||||
groupBy: [
|
||||
{
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
dataType: DataTypes.String,
|
||||
type: 'attribute',
|
||||
},
|
||||
{
|
||||
key: 'url.full',
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
dataType: DataTypes.String,
|
||||
type: 'attribute',
|
||||
},
|
||||
@@ -2225,7 +2177,7 @@ export const getEndPointZeroStateQueryPayload = (
|
||||
orderBy: [],
|
||||
groupBy: [
|
||||
{
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
dataType: DataTypes.String,
|
||||
type: 'tag',
|
||||
},
|
||||
@@ -2419,8 +2371,7 @@ export const statusCodeWidgetInfo = [
|
||||
|
||||
interface EndPointDropDownResponseRow {
|
||||
data: {
|
||||
[SPAN_ATTRIBUTES.URL_PATH]: string;
|
||||
'url.full': string;
|
||||
[SPAN_ATTRIBUTES.HTTP_URL]: string;
|
||||
A: number;
|
||||
};
|
||||
}
|
||||
@@ -2439,8 +2390,8 @@ export const getFormattedEndPointDropDownData = (
|
||||
}
|
||||
return data.map((row) => ({
|
||||
key: v4(),
|
||||
label: row.data[SPAN_ATTRIBUTES.URL_PATH] || row.data['url.full'] || '-',
|
||||
value: row.data[SPAN_ATTRIBUTES.URL_PATH] || row.data['url.full'] || '-',
|
||||
label: row.data[SPAN_ATTRIBUTES.HTTP_URL] || '-',
|
||||
value: row.data[SPAN_ATTRIBUTES.HTTP_URL] || '-',
|
||||
}));
|
||||
};
|
||||
|
||||
@@ -2769,7 +2720,6 @@ export const groupStatusCodes = (
|
||||
|
||||
export const getStatusCodeBarChartWidgetData = (
|
||||
domainName: string,
|
||||
endPointName: string,
|
||||
filters: IBuilderQuery['filters'],
|
||||
): Widgets => ({
|
||||
query: {
|
||||
@@ -2798,20 +2748,6 @@ export const getStatusCodeBarChartWidgetData = (
|
||||
op: '=',
|
||||
value: domainName,
|
||||
},
|
||||
...(endPointName
|
||||
? [
|
||||
{
|
||||
id: '8b1be6f0',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
},
|
||||
]
|
||||
: []),
|
||||
...(filters?.items || []),
|
||||
],
|
||||
op: 'AND',
|
||||
@@ -2933,7 +2869,7 @@ export const getAllEndpointsWidgetData = (
|
||||
filters,
|
||||
`${getDomainNameFilterExpression(
|
||||
domainName,
|
||||
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
)} AND ${clientKindExpression} AND http_url EXISTS`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
@@ -2965,7 +2901,7 @@ export const getAllEndpointsWidgetData = (
|
||||
filters,
|
||||
`${getDomainNameFilterExpression(
|
||||
domainName,
|
||||
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
)} AND ${clientKindExpression} AND http_url EXISTS`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
@@ -2997,7 +2933,7 @@ export const getAllEndpointsWidgetData = (
|
||||
filters,
|
||||
`${getDomainNameFilterExpression(
|
||||
domainName,
|
||||
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
)} AND ${clientKindExpression} AND http_url EXISTS`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
@@ -3029,7 +2965,7 @@ export const getAllEndpointsWidgetData = (
|
||||
filters,
|
||||
`${getDomainNameFilterExpression(
|
||||
domainName,
|
||||
)} AND ${clientKindExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
)} AND ${clientKindExpression} AND has_error = true AND http_url EXISTS`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
@@ -3060,24 +2996,12 @@ export const getAllEndpointsWidgetData = (
|
||||
);
|
||||
|
||||
widget.renderColumnCell = {
|
||||
[SPAN_ATTRIBUTES.URL_PATH]: (
|
||||
url: string | number,
|
||||
record?: RowData,
|
||||
): ReactNode => {
|
||||
// First try to use the url from the column value
|
||||
let urlValue = url;
|
||||
|
||||
// If url is empty/null and we have the record, fallback to url.full
|
||||
if (isEmptyFilterValue(url) && record) {
|
||||
const { 'url.full': urlFull } = record;
|
||||
urlValue = urlFull;
|
||||
}
|
||||
|
||||
if (!urlValue || urlValue === 'n/a') {
|
||||
[SPAN_ATTRIBUTES.HTTP_URL]: (url: string | number): ReactNode => {
|
||||
if (isEmptyFilterValue(url) || !url || url === 'n/a') {
|
||||
return <span>-</span>;
|
||||
}
|
||||
|
||||
const { endpoint } = extractPortAndEndpoint(String(urlValue));
|
||||
const { endpoint } = extractPortAndEndpoint(String(url));
|
||||
return <span>{getDisplayValue(endpoint)}</span>;
|
||||
},
|
||||
A: (numOfCalls: any): ReactNode => (
|
||||
@@ -3132,8 +3056,8 @@ export const getAllEndpointsWidgetData = (
|
||||
};
|
||||
|
||||
widget.customColTitles = {
|
||||
[SPAN_ATTRIBUTES.URL_PATH]: 'Endpoint',
|
||||
'net.peer.port': 'Port',
|
||||
[SPAN_ATTRIBUTES.HTTP_URL]: 'Endpoint',
|
||||
[SPAN_ATTRIBUTES.SERVER_PORT]: 'Port',
|
||||
};
|
||||
|
||||
widget.title = (
|
||||
@@ -3158,12 +3082,10 @@ export const getAllEndpointsWidgetData = (
|
||||
</div>
|
||||
);
|
||||
|
||||
widget.hiddenColumns = ['url.full'];
|
||||
|
||||
return widget;
|
||||
};
|
||||
|
||||
const keysToRemove = ['http.url', 'url.full', 'A', 'B', 'C', 'F1'];
|
||||
const keysToRemove = [SPAN_ATTRIBUTES.HTTP_URL, 'A', 'B', 'C', 'F1'];
|
||||
|
||||
export const getGroupByFiltersFromGroupByValues = (
|
||||
rowData: any,
|
||||
@@ -3221,7 +3143,7 @@ export const getRateOverTimeWidgetData = (
|
||||
filter: {
|
||||
expression: convertFiltersWithUrlHandling(
|
||||
filters || { items: [], op: 'AND' },
|
||||
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
|
||||
`http_host = '${domainName}'`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
@@ -3272,7 +3194,7 @@ export const getLatencyOverTimeWidgetData = (
|
||||
filter: {
|
||||
expression: convertFiltersWithUrlHandling(
|
||||
filters || { items: [], op: 'AND' },
|
||||
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
|
||||
`http_host = '${domainName}'`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
|
||||
@@ -1,7 +1,6 @@
/* eslint-disable jsx-a11y/no-static-element-interactions */
/* eslint-disable jsx-a11y/click-events-have-key-events */
import { useEffect, useState } from 'react';
import { useMutation } from 'react-query';
import { useEffect, useMemo, useState } from 'react';
import { useCopyToClipboard } from 'react-use';
import { Color } from '@signozhq/design-tokens';
import {
@@ -15,14 +14,16 @@ import {
  Tag,
  Typography,
} from 'antd';
import updateSubDomainAPI from 'api/customDomain/updateSubDomain';
import {
  RenderErrorResponseDTO,
  ZeustypesHostDTO,
} from 'api/generated/services/sigNoz.schemas';
import { useGetHosts, usePutHost } from 'api/generated/services/zeus';
import { AxiosError } from 'axios';
import LaunchChatSupport from 'components/LaunchChatSupport/LaunchChatSupport';
import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData';
import { useNotifications } from 'hooks/useNotifications';
import { InfoIcon, Link2, Pencil } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { HostsProps } from 'types/api/customDomain/types';

import './CustomDomainSettings.styles.scss';

@@ -35,7 +36,7 @@ export default function CustomDomainSettings(): JSX.Element {
  const { notifications } = useNotifications();
  const [isEditModalOpen, setIsEditModalOpen] = useState(false);
  const [isPollingEnabled, setIsPollingEnabled] = useState(false);
  const [hosts, setHosts] = useState<HostsProps[] | null>(null);
  const [hosts, setHosts] = useState<ZeustypesHostDTO[] | null>(null);

  const [updateDomainError, setUpdateDomainError] = useState<AxiosError | null>(
    null,
@@ -57,36 +58,37 @@ export default function CustomDomainSettings(): JSX.Element {
  };

  const {
    data: deploymentsData,
    isLoading: isLoadingDeploymentsData,
    isFetching: isFetchingDeploymentsData,
    refetch: refetchDeploymentsData,
  } = useGetDeploymentsData(true);
    data: hostsData,
    isLoading: isLoadingHosts,
    isFetching: isFetchingHosts,
    refetch: refetchHosts,
  } = useGetHosts();

  const {
    mutate: updateSubDomain,
    isLoading: isLoadingUpdateCustomDomain,
  } = useMutation(updateSubDomainAPI, {
    onSuccess: () => {
      setIsPollingEnabled(true);
      refetchDeploymentsData();
      setIsEditModalOpen(false);
    },
    onError: (error: AxiosError) => {
      setUpdateDomainError(error);
      setIsPollingEnabled(false);
    },
  });
  } = usePutHost<AxiosError<RenderErrorResponseDTO>>();

  const stripProtocol = (url: string): string => {
    return url?.split('://')[1] ?? url;
  };

  const dnsSuffix = useMemo(() => {
    const defaultHost = hosts?.find((h) => h.is_default);
    return defaultHost?.url && defaultHost?.name
      ? defaultHost.url.split(`${defaultHost.name}.`)[1] || ''
      : '';
  }, [hosts]);
|
||||
|
||||
useEffect(() => {
|
||||
if (isFetchingDeploymentsData) {
|
||||
if (isFetchingHosts) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (deploymentsData?.data?.status === 'success') {
|
||||
setHosts(deploymentsData.data.data.hosts);
|
||||
if (hostsData?.data?.status === 'success') {
|
||||
setHosts(hostsData?.data?.data?.hosts ?? null);
|
||||
|
||||
const activeCustomDomain = deploymentsData.data.data.hosts.find(
|
||||
const activeCustomDomain = hostsData?.data?.data?.hosts?.find(
|
||||
(host) => !host.is_default,
|
||||
);
|
||||
|
||||
@@ -97,32 +99,36 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
}
|
||||
}
|
||||
|
||||
if (deploymentsData?.data?.data?.state !== 'HEALTHY' && isPollingEnabled) {
|
||||
if (hostsData?.data?.data?.state !== 'HEALTHY' && isPollingEnabled) {
|
||||
setTimeout(() => {
|
||||
refetchDeploymentsData();
|
||||
refetchHosts();
|
||||
}, 3000);
|
||||
}
|
||||
|
||||
if (deploymentsData?.data?.data.state === 'HEALTHY') {
|
||||
if (hostsData?.data?.data?.state === 'HEALTHY') {
|
||||
setIsPollingEnabled(false);
|
||||
}
|
||||
}, [
|
||||
deploymentsData,
|
||||
refetchDeploymentsData,
|
||||
isPollingEnabled,
|
||||
isFetchingDeploymentsData,
|
||||
]);
|
||||
}, [hostsData, refetchHosts, isPollingEnabled, isFetchingHosts]);
|
||||
|
||||
const onUpdateCustomDomainSettings = (): void => {
|
||||
editForm
|
||||
.validateFields()
|
||||
.then((values) => {
|
||||
if (values.subdomain) {
|
||||
updateSubDomain({
|
||||
data: {
|
||||
name: values.subdomain,
|
||||
updateSubDomain(
|
||||
{ data: { name: values.subdomain } },
|
||||
{
|
||||
onSuccess: () => {
|
||||
setIsPollingEnabled(true);
|
||||
refetchHosts();
|
||||
setIsEditModalOpen(false);
|
||||
},
|
||||
onError: (error: AxiosError<RenderErrorResponseDTO>) => {
|
||||
setUpdateDomainError(error as AxiosError);
|
||||
setIsPollingEnabled(false);
|
||||
},
|
||||
},
|
||||
});
|
||||
);
|
||||
|
||||
setCustomDomainDetails({
|
||||
subdomain: values.subdomain,
|
||||
@@ -134,10 +140,8 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
});
|
||||
};
|
||||
|
||||
const onCopyUrlHandler = (host: string): void => {
|
||||
const url = `${host}.${deploymentsData?.data.data.cluster.region.dns}`;
|
||||
|
||||
setCopyUrl(url);
|
||||
const onCopyUrlHandler = (url: string): void => {
|
||||
setCopyUrl(stripProtocol(url));
|
||||
notifications.success({
|
||||
message: 'Copied to clipboard',
|
||||
});
|
||||
@@ -157,7 +161,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
</div>
|
||||
|
||||
<div className="custom-domain-settings-content">
|
||||
{!isLoadingDeploymentsData && (
|
||||
{!isLoadingHosts && (
|
||||
<Card className="custom-domain-settings-card">
|
||||
<div className="custom-domain-settings-content-header">
|
||||
Team {org?.[0]?.displayName} Information
|
||||
@@ -169,10 +173,9 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
<div
|
||||
className="custom-domain-url"
|
||||
key={host.name}
|
||||
onClick={(): void => onCopyUrlHandler(host.name)}
|
||||
onClick={(): void => onCopyUrlHandler(host.url || '')}
|
||||
>
|
||||
<Link2 size={12} /> {host.name}.
|
||||
{deploymentsData?.data.data.cluster.region.dns}
|
||||
<Link2 size={12} /> {stripProtocol(host.url || '')}
|
||||
{host.is_default && <Tag color={Color.BG_ROBIN_500}>Default</Tag>}
|
||||
</div>
|
||||
))}
|
||||
@@ -181,11 +184,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
<div className="custom-domain-url-edit-btn">
|
||||
<Button
|
||||
className="periscope-btn"
|
||||
disabled={
|
||||
isLoadingDeploymentsData ||
|
||||
isFetchingDeploymentsData ||
|
||||
isPollingEnabled
|
||||
}
|
||||
disabled={isLoadingHosts || isFetchingHosts || isPollingEnabled}
|
||||
type="default"
|
||||
icon={<Pencil size={10} />}
|
||||
onClick={(): void => setIsEditModalOpen(true)}
|
||||
@@ -198,7 +197,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
{isPollingEnabled && (
|
||||
<Alert
|
||||
className="custom-domain-update-status"
|
||||
message={`Updating your URL to ⎯ ${customDomainDetails?.subdomain}.${deploymentsData?.data.data.cluster.region.dns}. This may take a few mins.`}
|
||||
message={`Updating your URL to ⎯ ${customDomainDetails?.subdomain}.${dnsSuffix}. This may take a few mins.`}
|
||||
type="info"
|
||||
icon={<InfoIcon size={12} />}
|
||||
/>
|
||||
@@ -206,7 +205,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{isLoadingDeploymentsData && (
|
||||
{isLoadingHosts && (
|
||||
<Card className="custom-domain-settings-card">
|
||||
<Skeleton
|
||||
className="custom-domain-settings-skeleton"
|
||||
@@ -255,7 +254,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
addonBefore={updateDomainError && <InfoIcon size={12} color="red" />}
|
||||
placeholder="Enter Domain"
|
||||
onChange={(): void => setUpdateDomainError(null)}
|
||||
addonAfter={deploymentsData?.data.data.cluster.region.dns}
|
||||
addonAfter={dnsSuffix}
|
||||
autoFocus
|
||||
/>
|
||||
</Form.Item>
|
||||
@@ -267,7 +266,8 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
{updateDomainError.status === 409 ? (
|
||||
<Alert
|
||||
message={
|
||||
(updateDomainError?.response?.data as { error?: string })?.error ||
|
||||
(updateDomainError?.response?.data as RenderErrorResponseDTO)?.error
|
||||
?.message ||
|
||||
'You’ve already updated the custom domain once today. To make further changes, please contact our support team for assistance.'
|
||||
}
|
||||
type="warning"
|
||||
@@ -275,7 +275,10 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
/>
|
||||
) : (
|
||||
<Typography.Text type="danger">
|
||||
{(updateDomainError.response?.data as { error: string })?.error}
|
||||
{
|
||||
(updateDomainError?.response?.data as RenderErrorResponseDTO)?.error
|
||||
?.message
|
||||
}
|
||||
</Typography.Text>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -0,0 +1,128 @@
|
||||
import { GetHosts200 } from 'api/generated/services/sigNoz.schemas';
|
||||
import { rest, server } from 'mocks-server/server';
|
||||
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
|
||||
|
||||
import CustomDomainSettings from '../CustomDomainSettings';
|
||||
|
||||
const ZEUS_HOSTS_ENDPOINT = '*/api/v2/zeus/hosts';
|
||||
|
||||
const mockHostsResponse: GetHosts200 = {
|
||||
status: 'success',
|
||||
data: {
|
||||
name: 'accepted-starfish',
|
||||
state: 'HEALTHY',
|
||||
tier: 'PREMIUM',
|
||||
hosts: [
|
||||
{
|
||||
name: 'accepted-starfish',
|
||||
is_default: true,
|
||||
url: 'https://accepted-starfish.test.cloud',
|
||||
},
|
||||
{
|
||||
name: 'custom-host',
|
||||
is_default: false,
|
||||
url: 'https://custom-host.test.cloud',
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
describe('CustomDomainSettings', () => {
|
||||
afterEach(() => server.resetHandlers());
|
||||
|
||||
it('renders host URLs with protocol stripped and marks the default host', async () => {
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockHostsResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<CustomDomainSettings />);
|
||||
|
||||
await screen.findByText(/accepted-starfish\.test\.cloud/i);
|
||||
await screen.findByText(/custom-host\.test\.cloud/i);
|
||||
expect(screen.getByText('Default')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('opens edit modal with DNS suffix derived from the default host', async () => {
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockHostsResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
render(<CustomDomainSettings />);
|
||||
|
||||
await screen.findByText(/accepted-starfish\.test\.cloud/i);
|
||||
|
||||
await user.click(
|
||||
screen.getByRole('button', { name: /customize team['’]s url/i }),
|
||||
);
|
||||
|
||||
expect(
|
||||
screen.getByRole('dialog', { name: /customize your team['’]s url/i }),
|
||||
).toBeInTheDocument();
|
||||
// DNS suffix is the part of the default host URL after the name prefix
|
||||
expect(screen.getByText('test.cloud')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('submits PUT to /zeus/hosts with the entered subdomain as the payload', async () => {
|
||||
let capturedBody: Record<string, unknown> = {};
|
||||
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockHostsResponse)),
|
||||
),
|
||||
rest.put(ZEUS_HOSTS_ENDPOINT, async (req, res, ctx) => {
|
||||
capturedBody = await req.json<Record<string, unknown>>();
|
||||
return res(ctx.status(200), ctx.json({}));
|
||||
}),
|
||||
);
|
||||
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
render(<CustomDomainSettings />);
|
||||
|
||||
await screen.findByText(/accepted-starfish\.test\.cloud/i);
|
||||
await user.click(
|
||||
screen.getByRole('button', { name: /customize team['’]s url/i }),
|
||||
);
|
||||
|
||||
const input = screen.getByPlaceholderText(/enter domain/i);
|
||||
await user.clear(input);
|
||||
await user.type(input, 'myteam');
|
||||
await user.click(screen.getByRole('button', { name: /apply changes/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(capturedBody).toEqual({ name: 'myteam' });
|
||||
});
|
||||
});
|
||||
|
||||
it('shows contact support option when domain update returns 409', async () => {
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockHostsResponse)),
|
||||
),
|
||||
rest.put(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(
|
||||
ctx.status(409),
|
||||
ctx.json({ error: { message: 'Already updated today' } }),
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
render(<CustomDomainSettings />);
|
||||
|
||||
await screen.findByText(/accepted-starfish\.test\.cloud/i);
|
||||
await user.click(
|
||||
screen.getByRole('button', { name: /customize team['’]s url/i }),
|
||||
);
|
||||
await user.type(screen.getByPlaceholderText(/enter domain/i), 'myteam');
|
||||
await user.click(screen.getByRole('button', { name: /apply changes/i }));
|
||||
|
||||
expect(
|
||||
await screen.findByRole('button', { name: /contact support/i }),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -34,6 +34,9 @@ function DashboardVariableSelection(): JSX.Element | null {
const sortedVariablesArray = useDashboardVariablesSelector(
(state) => state.sortedVariablesArray,
);
const dynamicVariableOrder = useDashboardVariablesSelector(
(state) => state.dynamicVariableOrder,
);
const dependencyData = useDashboardVariablesSelector(
(state) => state.dependencyData,
);
@@ -52,10 +55,11 @@ function DashboardVariableSelection(): JSX.Element | null {
}, [getUrlVariables, updateUrlVariable, dashboardVariables]);

// Memoize the order key to avoid unnecessary triggers
const dependencyOrderKey = useMemo(
() => dependencyData?.order?.join(',') ?? '',
[dependencyData?.order],
);
const variableOrderKey = useMemo(() => {
const queryVariableOrderKey = dependencyData?.order?.join(',') ?? '';
const dynamicVariableOrderKey = dynamicVariableOrder?.join(',') ?? '';
return `${queryVariableOrderKey}|${dynamicVariableOrderKey}`;
}, [dependencyData?.order, dynamicVariableOrder]);

// Initialize fetch store then start a new fetch cycle.
// Runs on dependency order changes, and time range changes.
@@ -66,7 +70,7 @@ function DashboardVariableSelection(): JSX.Element | null {
initializeVariableFetchStore(allVariableNames);
enqueueFetchOfAllVariables();
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [dependencyOrderKey, minTime, maxTime]);
}, [variableOrderKey, minTime, maxTime]);

// Performance optimization: For dynamic variables with allSelected=true, we don't store
// individual values in localStorage since we can always derive them from available options.

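The hunk above widens the effect's trigger from the query-variable dependency order alone to a key that also encodes the dynamic-variable order, so adding or removing a dynamic variable restarts the fetch cycle. Below is a standalone sketch of the same pattern, with hypothetical names rather than the component's real store fields.

import { useEffect, useMemo } from 'react';

// Sketch of the combined-order-key pattern; `queryOrder` / `dynamicOrder` and the
// hook name are illustrative, not the real store fields.
function useVariableFetchCycle(
	queryOrder: string[] | undefined,
	dynamicOrder: string[] | undefined,
	startFetchCycle: () => void,
): void {
	// Joining both orderings into one string gives a stable primitive dependency,
	// so the effect re-runs only when either ordering actually changes.
	const orderKey = useMemo(
		() => `${queryOrder?.join(',') ?? ''}|${dynamicOrder?.join(',') ?? ''}`,
		[queryOrder, dynamicOrder],
	);

	useEffect(() => {
		startFetchCycle();
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, [orderKey]);
}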
@@ -0,0 +1,203 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { act, render } from '@testing-library/react';
|
||||
import {
|
||||
dashboardVariablesStore,
|
||||
setDashboardVariablesStore,
|
||||
updateDashboardVariablesStore,
|
||||
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStore';
|
||||
import {
|
||||
IDashboardVariables,
|
||||
IDashboardVariablesStoreState,
|
||||
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
|
||||
import {
|
||||
enqueueFetchOfAllVariables,
|
||||
initializeVariableFetchStore,
|
||||
} from 'providers/Dashboard/store/variableFetchStore';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
|
||||
import DashboardVariableSelection from '../DashboardVariableSelection';
|
||||
|
||||
// Mock providers/Dashboard/Dashboard
|
||||
const mockSetSelectedDashboard = jest.fn();
|
||||
const mockUpdateLocalStorageDashboardVariables = jest.fn();
|
||||
jest.mock('providers/Dashboard/Dashboard', () => ({
|
||||
useDashboard: (): Record<string, unknown> => ({
|
||||
setSelectedDashboard: mockSetSelectedDashboard,
|
||||
updateLocalStorageDashboardVariables: mockUpdateLocalStorageDashboardVariables,
|
||||
}),
|
||||
}));
|
||||
|
||||
// Mock hooks/dashboard/useVariablesFromUrl
|
||||
const mockUpdateUrlVariable = jest.fn();
|
||||
const mockGetUrlVariables = jest.fn().mockReturnValue({});
|
||||
jest.mock('hooks/dashboard/useVariablesFromUrl', () => ({
|
||||
__esModule: true,
|
||||
default: (): Record<string, unknown> => ({
|
||||
updateUrlVariable: mockUpdateUrlVariable,
|
||||
getUrlVariables: mockGetUrlVariables,
|
||||
}),
|
||||
}));
|
||||
|
||||
// Mock variableFetchStore functions
|
||||
jest.mock('providers/Dashboard/store/variableFetchStore', () => ({
|
||||
initializeVariableFetchStore: jest.fn(),
|
||||
enqueueFetchOfAllVariables: jest.fn(),
|
||||
enqueueDescendantsOfVariable: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock initializeDefaultVariables
|
||||
jest.mock('providers/Dashboard/initializeDefaultVariables', () => ({
|
||||
initializeDefaultVariables: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock react-redux useSelector for globalTime
|
||||
jest.mock('react-redux', () => ({
|
||||
...jest.requireActual('react-redux'),
|
||||
useSelector: jest.fn().mockReturnValue({ minTime: 1000, maxTime: 2000 }),
|
||||
}));
|
||||
|
||||
// Mock VariableItem to avoid rendering complexity
|
||||
jest.mock('../VariableItem', () => ({
|
||||
__esModule: true,
|
||||
default: (): JSX.Element => <div data-testid="variable-item" />,
|
||||
}));
|
||||
|
||||
function createVariable(
|
||||
overrides: Partial<IDashboardVariable> = {},
|
||||
): IDashboardVariable {
|
||||
return {
|
||||
id: 'test-id',
|
||||
name: 'test-var',
|
||||
description: '',
|
||||
type: 'QUERY',
|
||||
sort: 'DISABLED',
|
||||
showALLOption: false,
|
||||
multiSelect: false,
|
||||
order: 0,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function resetStore(): void {
|
||||
dashboardVariablesStore.set(() => ({
|
||||
dashboardId: '',
|
||||
variables: {},
|
||||
sortedVariablesArray: [],
|
||||
dependencyData: null,
|
||||
variableTypes: {},
|
||||
dynamicVariableOrder: [],
|
||||
}));
|
||||
}
|
||||
|
||||
describe('DashboardVariableSelection', () => {
|
||||
beforeEach(() => {
|
||||
resetStore();
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should call initializeVariableFetchStore and enqueueFetchOfAllVariables on mount', () => {
|
||||
const variables: IDashboardVariables = {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
};
|
||||
|
||||
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
|
||||
|
||||
render(<DashboardVariableSelection />);
|
||||
|
||||
expect(initializeVariableFetchStore).toHaveBeenCalledWith(['env']);
|
||||
expect(enqueueFetchOfAllVariables).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should re-trigger fetch cycle when dynamicVariableOrder changes', () => {
|
||||
const variables: IDashboardVariables = {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
};
|
||||
|
||||
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
|
||||
|
||||
render(<DashboardVariableSelection />);
|
||||
|
||||
// Clear mocks after initial render
|
||||
(initializeVariableFetchStore as jest.Mock).mockClear();
|
||||
(enqueueFetchOfAllVariables as jest.Mock).mockClear();
|
||||
|
||||
// Add a DYNAMIC variable which changes dynamicVariableOrder
|
||||
act(() => {
|
||||
updateDashboardVariablesStore({
|
||||
dashboardId: 'dash-1',
|
||||
variables: {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
expect(initializeVariableFetchStore).toHaveBeenCalledWith(
|
||||
expect.arrayContaining(['env', 'dyn1']),
|
||||
);
|
||||
expect(enqueueFetchOfAllVariables).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should re-trigger fetch cycle when a dynamic variable is removed', () => {
|
||||
const variables: IDashboardVariables = {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
|
||||
dyn2: createVariable({ name: 'dyn2', type: 'DYNAMIC', order: 2 }),
|
||||
};
|
||||
|
||||
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
|
||||
|
||||
render(<DashboardVariableSelection />);
|
||||
|
||||
(initializeVariableFetchStore as jest.Mock).mockClear();
|
||||
(enqueueFetchOfAllVariables as jest.Mock).mockClear();
|
||||
|
||||
// Remove dyn2, changing dynamicVariableOrder from ['dyn1','dyn2'] to ['dyn1']
|
||||
act(() => {
|
||||
updateDashboardVariablesStore({
|
||||
dashboardId: 'dash-1',
|
||||
variables: {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
expect(initializeVariableFetchStore).toHaveBeenCalledWith(['env', 'dyn1']);
|
||||
expect(enqueueFetchOfAllVariables).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should NOT re-trigger fetch cycle when dynamicVariableOrder stays the same', () => {
|
||||
const variables: IDashboardVariables = {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
|
||||
};
|
||||
|
||||
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
|
||||
|
||||
render(<DashboardVariableSelection />);
|
||||
|
||||
(initializeVariableFetchStore as jest.Mock).mockClear();
|
||||
(enqueueFetchOfAllVariables as jest.Mock).mockClear();
|
||||
|
||||
// Update a non-dynamic variable's selectedValue — dynamicVariableOrder unchanged
|
||||
act(() => {
|
||||
const snapshot = dashboardVariablesStore.getSnapshot();
|
||||
dashboardVariablesStore.set(
|
||||
(): IDashboardVariablesStoreState => ({
|
||||
...snapshot,
|
||||
variables: {
|
||||
...snapshot.variables,
|
||||
env: {
|
||||
...snapshot.variables.env,
|
||||
selectedValue: 'production',
|
||||
},
|
||||
},
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
expect(initializeVariableFetchStore).not.toHaveBeenCalled();
|
||||
expect(enqueueFetchOfAllVariables).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -1,7 +1,6 @@
import { useCallback } from 'react';
import ChartWrapper from 'container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper';
import HistogramTooltip from 'lib/uPlotV2/components/Tooltip/HistogramTooltip';
import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
import {
HistogramTooltipProps,
TooltipRenderArgs,
@@ -22,21 +21,11 @@ export default function Histogram(props: HistogramChartProps): JSX.Element {
if (customRenderTooltip) {
return customRenderTooltip(props);
}
const content = buildTooltipContent({
data: props.uPlotInstance.data,
series: props.uPlotInstance.series,
dataIndexes: props.dataIndexes,
activeSeriesIndex: props.seriesIndex,
uPlotInstance: props.uPlotInstance,
yAxisUnit: rest.yAxisUnit ?? '',
decimalPrecision: rest.decimalPrecision,
});
const tooltipProps: HistogramTooltipProps = {
...props,
timezone: rest.timezone,
yAxisUnit: rest.yAxisUnit,
decimalPrecision: rest.decimalPrecision,
content,
};
return <HistogramTooltip {...tooltipProps} />;
},

@@ -1,7 +1,6 @@
import { useCallback } from 'react';
import ChartWrapper from 'container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper';
import TimeSeriesTooltip from 'lib/uPlotV2/components/Tooltip/TimeSeriesTooltip';
import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
import {
TimeSeriesTooltipProps,
TooltipRenderArgs,
@@ -17,21 +16,11 @@ export default function TimeSeries(props: TimeSeriesChartProps): JSX.Element {
if (customRenderTooltip) {
return customRenderTooltip(props);
}
const content = buildTooltipContent({
data: props.uPlotInstance.data,
series: props.uPlotInstance.series,
dataIndexes: props.dataIndexes,
activeSeriesIndex: props.seriesIndex,
uPlotInstance: props.uPlotInstance,
yAxisUnit: rest.yAxisUnit ?? '',
decimalPrecision: rest.decimalPrecision,
});
const tooltipProps: TimeSeriesTooltipProps = {
...props,
timezone: rest.timezone,
yAxisUnit: rest.yAxisUnit,
decimalPrecision: rest.decimalPrecision,
content,
};
return <TimeSeriesTooltip {...tooltipProps} />;
},

@@ -2,10 +2,10 @@
import { useEffect, useState } from 'react';
import { Button, Skeleton, Tag, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { useGetHosts } from 'api/generated/services/zeus';
import ROUTES from 'constants/routes';
import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData';
import history from 'lib/history';
import { Globe, Link2 } from 'lucide-react';
import { Link2 } from 'lucide-react';
import Card from 'periscope/components/Card/Card';
import { useAppContext } from 'providers/App/App';
import { LicensePlatform } from 'types/api/licensesV3/getActive';
@@ -26,36 +26,21 @@ function DataSourceInfo({
const isEnabled =
activeLicense && activeLicense.platform === LicensePlatform.CLOUD;

const {
data: deploymentsData,
isError: isErrorDeploymentsData,
} = useGetDeploymentsData(isEnabled || false);
const { data: hostsData, isError } = useGetHosts({
query: { enabled: isEnabled || false },
});

const [region, setRegion] = useState<string>('');
const [url, setUrl] = useState<string>('');

useEffect(() => {
if (deploymentsData) {
switch (deploymentsData?.data.data.cluster.region.name) {
case 'in':
setRegion('India');
break;
case 'us':
setRegion('United States');
break;
case 'eu':
setRegion('Europe');
break;
default:
setRegion(deploymentsData?.data.data.cluster.region.name);
break;
if (hostsData) {
const defaultHost = hostsData?.data?.data?.hosts?.find((h) => h.is_default);
if (defaultHost?.url) {
const url = defaultHost?.url?.split('://')[1] ?? '';
setUrl(url);
}

setUrl(
`${deploymentsData?.data.data.name}.${deploymentsData?.data.data.cluster.region.dns}`,
);
}
}, [deploymentsData]);
}, [hostsData]);

const renderNotSendingData = (): JSX.Element => (
<>
@@ -123,14 +108,8 @@ function DataSourceInfo({
</Button>
</div>

{!isErrorDeploymentsData && deploymentsData && (
{!isError && hostsData && (
<div className="workspace-details">
<div className="workspace-region">
<Globe size={10} />

<Typography>{region}</Typography>
</div>

<div className="workspace-url">
<Link2 size={12} />

@@ -156,17 +135,11 @@ function DataSourceInfo({
Hello there, Welcome to your SigNoz workspace
</Typography>

{!isErrorDeploymentsData && deploymentsData && (
{!isError && hostsData && (
<Card className="welcome-card">
<Card.Content>
<div className="workspace-ready-container">
<div className="workspace-details">
<div className="workspace-region">
<Globe size={10} />

<Typography>{region}</Typography>
</div>

<div className="workspace-url">
<Link2 size={12} />

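The new effect above no longer maps region codes to display names; it simply picks the default host from the /zeus/hosts response and shows its URL without the protocol. A small helper-style sketch of that derivation follows; the helper name is illustrative, while the field names mirror the host DTO used in this diff.

// Illustrative helper mirroring the derivation in the effect above.
// The host shape follows the fields used in this diff (name, url, is_default).
interface HostLike {
	name?: string;
	url?: string;
	is_default?: boolean;
}

function getDefaultWorkspaceUrl(hosts: HostLike[] | undefined): string {
	const defaultHost = hosts?.find((h) => h.is_default);
	if (!defaultHost?.url) {
		return '';
	}
	// Strip the protocol ("https://foo.test.cloud" -> "foo.test.cloud").
	return defaultHost.url.split('://')[1] ?? defaultHost.url;
}

// Example (using the mocked hosts response from the tests above):
// getDefaultWorkspaceUrl([
//   { name: 'accepted-starfish', url: 'https://accepted-starfish.test.cloud', is_default: true },
// ]) -> 'accepted-starfish.test.cloud'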
@@ -0,0 +1,69 @@
|
||||
import { GetHosts200 } from 'api/generated/services/sigNoz.schemas';
|
||||
import { rest, server } from 'mocks-server/server';
|
||||
import { render, screen } from 'tests/test-utils';
|
||||
|
||||
import DataSourceInfo from '../DataSourceInfo';
|
||||
|
||||
const ZEUS_HOSTS_ENDPOINT = '*/api/v2/zeus/hosts';
|
||||
|
||||
const mockHostsResponse: GetHosts200 = {
|
||||
status: 'success',
|
||||
data: {
|
||||
name: 'accepted-starfish',
|
||||
state: 'HEALTHY',
|
||||
tier: 'PREMIUM',
|
||||
hosts: [
|
||||
{
|
||||
name: 'accepted-starfish',
|
||||
is_default: true,
|
||||
url: 'https://accepted-starfish.test.cloud',
|
||||
},
|
||||
{
|
||||
name: 'custom-host',
|
||||
is_default: false,
|
||||
url: 'https://custom-host.test.cloud',
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
describe('DataSourceInfo', () => {
|
||||
afterEach(() => server.resetHandlers());
|
||||
|
||||
it('renders the default workspace URL with protocol stripped', async () => {
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockHostsResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<DataSourceInfo dataSentToSigNoz={false} isLoading={false} />);
|
||||
|
||||
await screen.findByText(/accepted-starfish\.test\.cloud/i);
|
||||
});
|
||||
|
||||
it('does not render workspace URL when GET /zeus/hosts fails', async () => {
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(500), ctx.json({})),
|
||||
),
|
||||
);
|
||||
|
||||
render(<DataSourceInfo dataSentToSigNoz={false} isLoading={false} />);
|
||||
|
||||
await screen.findByText(/Your workspace is ready/i);
|
||||
expect(screen.queryByText(/signoz\.cloud/i)).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders workspace URL in the data-received view when telemetry is flowing', async () => {
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockHostsResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<DataSourceInfo dataSentToSigNoz={true} isLoading={false} />);
|
||||
|
||||
await screen.findByText(/accepted-starfish\.test\.cloud/i);
|
||||
});
|
||||
});
|
||||
@@ -36,24 +36,6 @@ jest.mock('react-router-dom', () => {
|
||||
};
|
||||
});
|
||||
|
||||
// Mock deployments data hook to avoid unrelated network calls in this page
|
||||
jest.mock(
|
||||
'hooks/CustomDomain/useGetDeploymentsData',
|
||||
(): Record<string, unknown> => ({
|
||||
useGetDeploymentsData: (): {
|
||||
data: undefined;
|
||||
isLoading: boolean;
|
||||
isFetching: boolean;
|
||||
isError: boolean;
|
||||
} => ({
|
||||
data: undefined,
|
||||
isLoading: false,
|
||||
isFetching: false,
|
||||
isError: false,
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
const TEST_CREATED_UPDATED = '2024-01-01T00:00:00Z';
|
||||
const TEST_EXPIRES_AT = '2030-01-01T00:00:00Z';
|
||||
const TEST_WORKSPACE_ID = 'w1';
|
||||
|
||||
@@ -26,7 +26,7 @@ jest.mock('lib/history', () => ({
|
||||
// API Endpoints
|
||||
const ORG_PREFERENCES_ENDPOINT = '*/api/v1/org/preferences/list';
|
||||
const UPDATE_ORG_PREFERENCE_ENDPOINT = '*/api/v1/org/preferences/name/update';
|
||||
const UPDATE_PROFILE_ENDPOINT = '*/api/gateway/v2/profiles/me';
|
||||
const UPDATE_PROFILE_ENDPOINT = '*/api/v2/zeus/profiles';
|
||||
const EDIT_ORG_ENDPOINT = '*/api/v2/orgs/me';
|
||||
const INVITE_USERS_ENDPOINT = '*/api/v1/invite/bulk/create';
|
||||
|
||||
@@ -277,6 +277,46 @@ describe('OnboardingQuestionaire Component', () => {
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('fires PUT to /zeus/profiles and advances to step 4 on success', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
let profilePutCalled = false;
|
||||
|
||||
server.use(
|
||||
rest.put(UPDATE_PROFILE_ENDPOINT, (_, res, ctx) => {
|
||||
profilePutCalled = true;
|
||||
return res(ctx.status(200), ctx.json({ status: 'success', data: {} }));
|
||||
}),
|
||||
);
|
||||
|
||||
render(<OnboardingQuestionaire />);
|
||||
|
||||
// Navigate to step 3
|
||||
await user.click(screen.getByLabelText(/datadog/i));
|
||||
await user.click(screen.getByRole('radio', { name: /yes/i }));
|
||||
await user.click(screen.getByLabelText(/just exploring/i));
|
||||
await user.click(screen.getByRole('button', { name: /next/i }));
|
||||
|
||||
await user.type(
|
||||
await screen.findByPlaceholderText(/e\.g\., googling/i),
|
||||
'Found via Google',
|
||||
);
|
||||
await user.click(screen.getByLabelText(/lowering observability costs/i));
|
||||
await user.click(screen.getByRole('button', { name: /next/i }));
|
||||
|
||||
// Click "I'll do this later" on step 3 — triggers PUT /zeus/profiles
|
||||
await user.click(
|
||||
await screen.findByRole('button', { name: /i'll do this later/i }),
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(profilePutCalled).toBe(true);
|
||||
// Step 3 content is gone — successfully advanced to step 4
|
||||
expect(
|
||||
screen.queryByText(/what does your scale approximately look like/i),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('shows do later button', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
render(<OnboardingQuestionaire />);
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
import { useEffect, useState } from 'react';
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
import { toast } from '@signozhq/sonner';
|
||||
import { NotificationInstance } from 'antd/es/notification/interface';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import updateProfileAPI from 'api/onboarding/updateProfile';
|
||||
import { RenderErrorResponseDTO } from 'api/generated/services/sigNoz.schemas';
|
||||
import { usePutProfile } from 'api/generated/services/zeus';
|
||||
import listOrgPreferences from 'api/v1/org/preferences/list';
|
||||
import updateOrgPreferenceAPI from 'api/v1/org/preferences/name/update';
|
||||
import { AxiosError } from 'axios';
|
||||
@@ -121,20 +123,9 @@ function OnboardingQuestionaire(): JSX.Element {
|
||||
optimiseSignozDetails.hostsPerDay === 0 &&
|
||||
optimiseSignozDetails.services === 0;
|
||||
|
||||
const { mutate: updateProfile, isLoading: isUpdatingProfile } = useMutation(
|
||||
updateProfileAPI,
|
||||
{
|
||||
onSuccess: () => {
|
||||
setCurrentStep(4);
|
||||
},
|
||||
onError: (error) => {
|
||||
showErrorNotification(notifications, error as AxiosError);
|
||||
|
||||
// Allow user to proceed even if API fails
|
||||
setCurrentStep(4);
|
||||
},
|
||||
},
|
||||
);
|
||||
const { mutate: updateProfile, isLoading: isUpdatingProfile } = usePutProfile<
|
||||
AxiosError<RenderErrorResponseDTO>
|
||||
>();
|
||||
|
||||
const { mutate: updateOrgPreference } = useMutation(updateOrgPreferenceAPI, {
|
||||
onSuccess: () => {
|
||||
@@ -153,29 +144,44 @@ function OnboardingQuestionaire(): JSX.Element {
|
||||
nextPageID: 4,
|
||||
});
|
||||
|
||||
updateProfile({
|
||||
uses_otel: orgDetails?.usesOtel as boolean,
|
||||
has_existing_observability_tool: orgDetails?.usesObservability as boolean,
|
||||
existing_observability_tool:
|
||||
orgDetails?.observabilityTool === 'Others'
|
||||
? (orgDetails?.otherTool as string)
|
||||
: (orgDetails?.observabilityTool as string),
|
||||
where_did_you_discover_signoz: signozDetails?.discoverSignoz as string,
|
||||
timeline_for_migrating_to_signoz: orgDetails?.migrationTimeline as string,
|
||||
reasons_for_interest_in_signoz: signozDetails?.interestInSignoz?.includes(
|
||||
'Others',
|
||||
)
|
||||
? ([
|
||||
...(signozDetails?.interestInSignoz?.filter(
|
||||
(item) => item !== 'Others',
|
||||
) || []),
|
||||
signozDetails?.otherInterestInSignoz,
|
||||
] as string[])
|
||||
: (signozDetails?.interestInSignoz as string[]),
|
||||
logs_scale_per_day_in_gb: optimiseSignozDetails?.logsPerDay as number,
|
||||
number_of_hosts: optimiseSignozDetails?.hostsPerDay as number,
|
||||
number_of_services: optimiseSignozDetails?.services as number,
|
||||
});
|
||||
updateProfile(
|
||||
{
|
||||
data: {
|
||||
uses_otel: orgDetails?.usesOtel as boolean,
|
||||
has_existing_observability_tool: orgDetails?.usesObservability as boolean,
|
||||
existing_observability_tool:
|
||||
orgDetails?.observabilityTool === 'Others'
|
||||
? (orgDetails?.otherTool as string)
|
||||
: (orgDetails?.observabilityTool as string),
|
||||
where_did_you_discover_signoz: signozDetails?.discoverSignoz as string,
|
||||
timeline_for_migrating_to_signoz: orgDetails?.migrationTimeline as string,
|
||||
reasons_for_interest_in_signoz: signozDetails?.interestInSignoz?.includes(
|
||||
'Others',
|
||||
)
|
||||
? ([
|
||||
...(signozDetails?.interestInSignoz?.filter(
|
||||
(item) => item !== 'Others',
|
||||
) || []),
|
||||
signozDetails?.otherInterestInSignoz,
|
||||
] as string[])
|
||||
: (signozDetails?.interestInSignoz as string[]),
|
||||
logs_scale_per_day_in_gb: optimiseSignozDetails?.logsPerDay as number,
|
||||
number_of_hosts: optimiseSignozDetails?.hostsPerDay as number,
|
||||
number_of_services: optimiseSignozDetails?.services as number,
|
||||
},
|
||||
},
|
||||
{
|
||||
onSuccess: () => {
|
||||
setCurrentStep(4);
|
||||
},
|
||||
onError: (error: any) => {
|
||||
toast.error(error?.message || SOMETHING_WENT_WRONG);
|
||||
|
||||
// Allow user to proceed even if API fails
|
||||
setCurrentStep(4);
|
||||
},
|
||||
},
|
||||
);
|
||||
};
|
||||
|
||||
const handleOnboardingComplete = (): void => {
|
||||
|
||||
@@ -3,6 +3,7 @@ import { MemoryRouter, Route } from 'react-router-dom';
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
|
||||
import { AppProvider } from 'providers/App/App';
|
||||
import MockQueryClientProvider from 'providers/test/MockQueryClientProvider';
|
||||
import { Span } from 'types/api/trace/getTraceV2';
|
||||
@@ -108,7 +109,7 @@ const createMockSpan = (): Span => ({
|
||||
statusMessage: '',
|
||||
tagMap: {
|
||||
'http.method': 'GET',
|
||||
'http.url': '/api/users?page=1',
|
||||
[SPAN_ATTRIBUTES.HTTP_URL]: '/api/users?page=1',
|
||||
'http.status_code': '200',
|
||||
'service.name': 'frontend-service',
|
||||
'span.kind': 'server',
|
||||
|
||||
@@ -5,6 +5,7 @@ import getSpanPercentiles from 'api/trace/getSpanPercentiles';
|
||||
import getUserPreference from 'api/v1/user/preferences/name/get';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { server } from 'mocks-server/server';
|
||||
import { QueryBuilderContext } from 'providers/QueryBuilder';
|
||||
@@ -878,7 +879,9 @@ describe('SpanDetailsDrawer', () => {
|
||||
|
||||
// Verify only matching attributes are shown (use getAllByText for all since they appear in multiple places)
|
||||
expect(screen.getAllByText('http.method').length).toBeGreaterThan(0);
|
||||
expect(screen.getAllByText('http.url').length).toBeGreaterThan(0);
|
||||
expect(screen.getAllByText(SPAN_ATTRIBUTES.HTTP_URL).length).toBeGreaterThan(
|
||||
0,
|
||||
);
|
||||
expect(screen.getAllByText('http.status_code').length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
@@ -1126,7 +1129,7 @@ describe('SpanDetailsDrawer - Search Visibility User Flows', () => {
|
||||
|
||||
// User sees all attributes initially
|
||||
expect(screen.getByText('http.method')).toBeInTheDocument();
|
||||
expect(screen.getByText('http.url')).toBeInTheDocument();
|
||||
expect(screen.getByText(SPAN_ATTRIBUTES.HTTP_URL)).toBeInTheDocument();
|
||||
expect(screen.getByText('http.status_code')).toBeInTheDocument();
|
||||
|
||||
// User types "method" in search
|
||||
@@ -1136,7 +1139,7 @@ describe('SpanDetailsDrawer - Search Visibility User Flows', () => {
|
||||
// User sees only matching attributes
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('http.method')).toBeInTheDocument();
|
||||
expect(screen.queryByText('http.url')).not.toBeInTheDocument();
|
||||
expect(screen.queryByText(SPAN_ATTRIBUTES.HTTP_URL)).not.toBeInTheDocument();
|
||||
expect(screen.queryByText('http.status_code')).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
|
||||
import { ILog } from 'types/api/logs/log';
|
||||
import { Span } from 'types/api/trace/getTraceV2';
|
||||
|
||||
@@ -22,7 +23,7 @@ export const mockSpan: Span = {
|
||||
event: [],
|
||||
tagMap: {
|
||||
'http.method': 'GET',
|
||||
'http.url': '/api/test',
|
||||
[SPAN_ATTRIBUTES.HTTP_URL]: '/api/test',
|
||||
'http.status_code': '200',
|
||||
},
|
||||
hasError: false,
|
||||
|
||||
@@ -1,13 +0,0 @@
import { useQuery, UseQueryResult } from 'react-query';
import { getDeploymentsData } from 'api/customDomain/getDeploymentsData';
import { AxiosError, AxiosResponse } from 'axios';
import { DeploymentsDataProps } from 'types/api/customDomain/types';

export const useGetDeploymentsData = (
isEnabled: boolean,
): UseQueryResult<AxiosResponse<DeploymentsDataProps>, AxiosError> =>
useQuery<AxiosResponse<DeploymentsDataProps>, AxiosError>({
queryKey: ['getDeploymentsData'],
queryFn: () => getDeploymentsData(),
enabled: isEnabled,
});
@@ -1,15 +0,0 @@
import { useQuery, UseQueryResult } from 'react-query';
import { getAllIngestionKeys } from 'api/IngestionKeys/getAllIngestionKeys';
import { AxiosError, AxiosResponse } from 'axios';
import {
AllIngestionKeyProps,
GetIngestionKeyProps,
} from 'types/api/ingestionKeys/types';

export const useGetAllIngestionsKeys = (
props: GetIngestionKeyProps,
): UseQueryResult<AxiosResponse<AllIngestionKeyProps>, AxiosError> =>
useQuery<AxiosResponse<AllIngestionKeyProps>, AxiosError>({
queryKey: [`IngestionKeys-${props.page}-${props.search}`],
queryFn: () => getAllIngestionKeys(props),
});
@@ -1,8 +1,31 @@
import { HistogramTooltipProps } from '../types';
import { useMemo } from 'react';

import { HistogramTooltipProps, TooltipContentItem } from '../types';
import Tooltip from './Tooltip';
import { buildTooltipContent } from './utils';

export default function HistogramTooltip(
props: HistogramTooltipProps,
): JSX.Element {
return <Tooltip {...props} showTooltipHeader={false} />;
const content = useMemo(
(): TooltipContentItem[] =>
buildTooltipContent({
data: props.uPlotInstance.data,
series: props.uPlotInstance.series,
dataIndexes: props.dataIndexes,
activeSeriesIndex: props.seriesIndex,
uPlotInstance: props.uPlotInstance,
yAxisUnit: props.yAxisUnit ?? '',
decimalPrecision: props.decimalPrecision,
}),
[
props.uPlotInstance,
props.seriesIndex,
props.dataIndexes,
props.yAxisUnit,
props.decimalPrecision,
],
);

return <Tooltip {...props} content={content} showTooltipHeader={false} />;
}

@@ -1,8 +1,31 @@
import { TimeSeriesTooltipProps } from '../types';
import { useMemo } from 'react';

import { TimeSeriesTooltipProps, TooltipContentItem } from '../types';
import Tooltip from './Tooltip';
import { buildTooltipContent } from './utils';

export default function TimeSeriesTooltip(
props: TimeSeriesTooltipProps,
): JSX.Element {
return <Tooltip {...props} />;
const content = useMemo(
(): TooltipContentItem[] =>
buildTooltipContent({
data: props.uPlotInstance.data,
series: props.uPlotInstance.series,
dataIndexes: props.dataIndexes,
activeSeriesIndex: props.seriesIndex,
uPlotInstance: props.uPlotInstance,
yAxisUnit: props.yAxisUnit ?? '',
decimalPrecision: props.decimalPrecision,
}),
[
props.uPlotInstance,
props.seriesIndex,
props.dataIndexes,
props.yAxisUnit,
props.decimalPrecision,
],
);

return <Tooltip {...props} content={content} />;
}

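Both tooltip wrappers above now memoize buildTooltipContent over the same five inputs. As a possible follow-up (a sketch under assumptions, not part of this change), that duplication could be hoisted into a shared hook; the prop shape below is a hypothetical subset of the real tooltip props.

import { useMemo } from 'react';
import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
import uPlot from 'uplot';

// Hypothetical minimal input shape; the real HistogramTooltipProps /
// TimeSeriesTooltipProps carry more fields than these.
interface TooltipContentInputs {
	uPlotInstance: uPlot;
	dataIndexes: Parameters<typeof buildTooltipContent>[0]['dataIndexes'];
	seriesIndex: Parameters<typeof buildTooltipContent>[0]['activeSeriesIndex'];
	yAxisUnit?: string;
	decimalPrecision?: Parameters<typeof buildTooltipContent>[0]['decimalPrecision'];
}

// Sketch of a shared hook both wrappers could call instead of repeating the memo.
function useTooltipContent(
	props: TooltipContentInputs,
): ReturnType<typeof buildTooltipContent> {
	return useMemo(
		() =>
			buildTooltipContent({
				data: props.uPlotInstance.data,
				series: props.uPlotInstance.series,
				dataIndexes: props.dataIndexes,
				activeSeriesIndex: props.seriesIndex,
				uPlotInstance: props.uPlotInstance,
				yAxisUnit: props.yAxisUnit ?? '',
				decimalPrecision: props.decimalPrecision,
			}),
		[
			props.uPlotInstance,
			props.seriesIndex,
			props.dataIndexes,
			props.yAxisUnit,
			props.decimalPrecision,
		],
	);
}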
@@ -28,18 +28,23 @@
font-weight: 500;
}

.uplot-tooltip-list {
&::-webkit-scrollbar {
width: 0.3rem;
}
.uplot-tooltip-list-container {
overflow-y: auto;
max-height: 330px;

&::-webkit-scrollbar-track {
background: transparent;
}
.uplot-tooltip-list {
&::-webkit-scrollbar {
width: 0.3rem;
}

&::-webkit-scrollbar-thumb {
background: var(--bg-slate-100);
border-radius: 0.5rem;
&::-webkit-scrollbar-track {
background: transparent;
}

&::-webkit-scrollbar-thumb {
background: var(--bg-slate-100);
border-radius: 0.5rem;
}
}
}

@@ -1,4 +1,4 @@
import { useMemo } from 'react';
import { useMemo, useState } from 'react';
import { Virtuoso } from 'react-virtuoso';
import cx from 'classnames';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
@@ -11,6 +11,7 @@ import './Tooltip.styles.scss';

const TOOLTIP_LIST_MAX_HEIGHT = 330;
const TOOLTIP_ITEM_HEIGHT = 38;
const TOOLTIP_LIST_PADDING = 10;

export default function Tooltip({
uPlotInstance,
@@ -19,7 +20,7 @@ export default function Tooltip({
showTooltipHeader = true,
}: TooltipProps): JSX.Element {
const isDarkMode = useIsDarkMode();

const [listHeight, setListHeight] = useState(0);
const tooltipContent = content ?? [];

const headerTitle = useMemo(() => {
@@ -41,42 +42,52 @@ export default function Tooltip({
showTooltipHeader,
]);

const virtuosoStyle = useMemo(() => {
return {
height:
listHeight > 0
? Math.min(listHeight + TOOLTIP_LIST_PADDING, TOOLTIP_LIST_MAX_HEIGHT)
: Math.min(
tooltipContent.length * TOOLTIP_ITEM_HEIGHT,
TOOLTIP_LIST_MAX_HEIGHT,
),
width: '100%',
};
}, [listHeight, tooltipContent.length]);

return (
<div
className={cx(
'uplot-tooltip-container',
isDarkMode ? 'darkMode' : 'lightMode',
)}
data-testid="uplot-tooltip-container"
>
{showTooltipHeader && (
<div className="uplot-tooltip-header">
<div className="uplot-tooltip-header" data-testid="uplot-tooltip-header">
<span>{headerTitle}</span>
</div>
)}
<div
style={{
height: Math.min(
tooltipContent.length * TOOLTIP_ITEM_HEIGHT,
TOOLTIP_LIST_MAX_HEIGHT,
),
minHeight: 0,
}}
>
<div className="uplot-tooltip-list-container">
{tooltipContent.length > 0 ? (
<Virtuoso
className="uplot-tooltip-list"
data-testid="uplot-tooltip-list"
data={tooltipContent}
defaultItemHeight={TOOLTIP_ITEM_HEIGHT}
style={virtuosoStyle}
totalListHeightChanged={setListHeight}
itemContent={(_, item): JSX.Element => (
<div className="uplot-tooltip-item">
<div className="uplot-tooltip-item" data-testid="uplot-tooltip-item">
<div
className="uplot-tooltip-item-marker"
style={{ borderColor: item.color }}
data-is-legend-marker={true}
data-testid="uplot-tooltip-item-marker"
/>
<div
className="uplot-tooltip-item-content"
style={{ color: item.color, fontWeight: item.isActive ? 700 : 400 }}
data-testid="uplot-tooltip-item-content"
>
{item.label}: {item.tooltipValue}
</div>

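The virtuosoStyle memo above sizes the list from the height Virtuoso reports (plus padding), falling back to an item-count estimate, and caps both at 330px. A tiny pure-function sketch of that rule, using the constants from this diff; the expected values match the Tooltip unit tests further down.

// Sketch of the height rule used by virtuosoStyle above (constants from the diff).
const TOOLTIP_LIST_MAX_HEIGHT = 330;
const TOOLTIP_ITEM_HEIGHT = 38;
const TOOLTIP_LIST_PADDING = 10;

function computeTooltipListHeight(
	reportedListHeight: number,
	itemCount: number,
): number {
	// Prefer the height Virtuoso has measured; otherwise estimate from item count.
	return reportedListHeight > 0
		? Math.min(reportedListHeight + TOOLTIP_LIST_PADDING, TOOLTIP_LIST_MAX_HEIGHT)
		: Math.min(itemCount * TOOLTIP_ITEM_HEIGHT, TOOLTIP_LIST_MAX_HEIGHT);
}

// Examples matching the unit tests below:
// computeTooltipListHeight(200, 2) -> 210 (measured 200 + 10 padding)
// computeTooltipListHeight(0, 2)   -> 76  (2 items * 38px fallback)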
@@ -0,0 +1,208 @@
|
||||
import React from 'react';
|
||||
import { VirtuosoMockContext } from 'react-virtuoso';
|
||||
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
||||
import dayjs from 'dayjs';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { render, RenderResult, screen } from 'tests/test-utils';
|
||||
import uPlot from 'uplot';
|
||||
|
||||
import { TooltipContentItem } from '../../types';
|
||||
import Tooltip from '../Tooltip';
|
||||
|
||||
type MockVirtuosoProps = {
|
||||
data: TooltipContentItem[];
|
||||
itemContent: (index: number, item: TooltipContentItem) => React.ReactNode;
|
||||
className?: string;
|
||||
style?: React.CSSProperties;
|
||||
totalListHeightChanged?: (height: number) => void;
|
||||
'data-testid'?: string;
|
||||
};
|
||||
|
||||
let mockTotalListHeight = 200;
|
||||
|
||||
jest.mock('react-virtuoso', () => {
|
||||
const actual = jest.requireActual('react-virtuoso');
|
||||
|
||||
return {
|
||||
...actual,
|
||||
Virtuoso: ({
|
||||
data,
|
||||
itemContent,
|
||||
className,
|
||||
style,
|
||||
totalListHeightChanged,
|
||||
'data-testid': dataTestId,
|
||||
}: MockVirtuosoProps): JSX.Element => {
|
||||
if (totalListHeightChanged) {
|
||||
// Simulate Virtuoso reporting total list height
|
||||
totalListHeightChanged(mockTotalListHeight);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className={className} style={style} data-testid={dataTestId}>
|
||||
{data.map((item, index) => (
|
||||
<div key={item.label ?? index.toString()}>{itemContent(index, item)}</div>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
jest.mock('hooks/useDarkMode', () => ({
|
||||
useIsDarkMode: jest.fn(),
|
||||
}));
|
||||
|
||||
const mockUseIsDarkMode = useIsDarkMode as jest.MockedFunction<
|
||||
typeof useIsDarkMode
|
||||
>;
|
||||
|
||||
type TooltipTestProps = React.ComponentProps<typeof Tooltip>;
|
||||
|
||||
function createTooltipContent(
|
||||
overrides: Partial<TooltipContentItem> = {},
|
||||
): TooltipContentItem {
|
||||
return {
|
||||
label: 'Series A',
|
||||
value: 10,
|
||||
tooltipValue: '10',
|
||||
color: '#ff0000',
|
||||
isActive: true,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function createUPlotInstance(cursorIdx: number | null): uPlot {
|
||||
return ({
|
||||
data: [[1], []],
|
||||
cursor: { idx: cursorIdx },
|
||||
// The rest of the uPlot fields are not used by Tooltip
|
||||
} as unknown) as uPlot;
|
||||
}
|
||||
|
||||
function renderTooltip(props: Partial<TooltipTestProps> = {}): RenderResult {
|
||||
const defaultProps: TooltipTestProps = {
|
||||
uPlotInstance: createUPlotInstance(null),
|
||||
timezone: 'UTC',
|
||||
content: [],
|
||||
showTooltipHeader: true,
|
||||
// TooltipRenderArgs (not used directly in component but required by type)
|
||||
dataIndexes: [],
|
||||
seriesIndex: null,
|
||||
isPinned: false,
|
||||
dismiss: jest.fn(),
|
||||
viaSync: false,
|
||||
} as TooltipTestProps;
|
||||
|
||||
return render(
|
||||
<VirtuosoMockContext.Provider value={{ viewportHeight: 300, itemHeight: 38 }}>
|
||||
<Tooltip {...defaultProps} {...props} />
|
||||
</VirtuosoMockContext.Provider>,
|
||||
);
|
||||
}
|
||||
|
||||
describe('Tooltip', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockUseIsDarkMode.mockReturnValue(false);
|
||||
mockTotalListHeight = 200;
|
||||
});
|
||||
|
||||
it('renders header title when showTooltipHeader is true and cursor index is present', () => {
|
||||
const uPlotInstance = createUPlotInstance(0);
|
||||
|
||||
renderTooltip({ uPlotInstance });
|
||||
|
||||
const expectedTitle = dayjs(1 * 1000)
|
||||
.tz('UTC')
|
||||
.format(DATE_TIME_FORMATS.MONTH_DATETIME_SECONDS);
|
||||
|
||||
expect(screen.getByText(expectedTitle)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('does not render header when showTooltipHeader is false', () => {
|
||||
const uPlotInstance = createUPlotInstance(0);
|
||||
|
||||
renderTooltip({ uPlotInstance, showTooltipHeader: false });
|
||||
|
||||
const unexpectedTitle = dayjs(1 * 1000)
|
||||
.tz('UTC')
|
||||
.format(DATE_TIME_FORMATS.MONTH_DATETIME_SECONDS);
|
||||
|
||||
expect(screen.queryByText(unexpectedTitle)).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders lightMode class when dark mode is disabled', () => {
|
||||
const uPlotInstance = createUPlotInstance(null);
|
||||
mockUseIsDarkMode.mockReturnValue(false);
|
||||
|
||||
renderTooltip({ uPlotInstance });
|
||||
|
||||
const container = screen.getByTestId('uplot-tooltip-container');
|
||||
|
||||
expect(container).toHaveClass('lightMode');
|
||||
expect(container).not.toHaveClass('darkMode');
|
||||
});
|
||||
|
||||
it('renders darkMode class when dark mode is enabled', () => {
|
||||
const uPlotInstance = createUPlotInstance(null);
|
||||
mockUseIsDarkMode.mockReturnValue(true);
|
||||
|
||||
renderTooltip({ uPlotInstance });
|
||||
|
||||
const container = screen.getByTestId('uplot-tooltip-container');
|
||||
|
||||
expect(container).toHaveClass('darkMode');
|
||||
expect(container).not.toHaveClass('lightMode');
|
||||
});
|
||||
|
||||
it('renders tooltip items when content is provided', () => {
|
||||
const uPlotInstance = createUPlotInstance(null);
|
||||
const content = [createTooltipContent()];
|
||||
|
||||
renderTooltip({ uPlotInstance, content });
|
||||
|
||||
const list = screen.queryByTestId('uplot-tooltip-list');
|
||||
|
||||
expect(list).not.toBeNull();
|
||||
|
||||
const marker = screen.getByTestId('uplot-tooltip-item-marker');
|
||||
const itemContent = screen.getByTestId('uplot-tooltip-item-content');
|
||||
|
||||
expect(marker).toHaveStyle({ borderColor: '#ff0000' });
|
||||
expect(itemContent).toHaveStyle({ color: '#ff0000', fontWeight: '700' });
|
||||
expect(itemContent).toHaveTextContent('Series A: 10');
|
||||
});
|
||||
|
||||
it('does not render tooltip list when content is empty', () => {
|
||||
const uPlotInstance = createUPlotInstance(null);
|
||||
|
||||
renderTooltip({ uPlotInstance, content: [] });
|
||||
|
||||
const list = screen.queryByTestId('uplot-tooltip-list');
|
||||
|
||||
expect(list).toBeNull();
|
||||
});
|
||||
|
||||
it('sets tooltip list height based on content length, height returned by Virtuoso', () => {
|
||||
const uPlotInstance = createUPlotInstance(null);
|
||||
const content = [createTooltipContent(), createTooltipContent()];
|
||||
|
||||
renderTooltip({ uPlotInstance, content });
|
||||
|
||||
const list = screen.getByTestId('uplot-tooltip-list');
|
||||
expect(list).toHaveStyle({ height: '210px' });
|
||||
});
|
||||
|
||||
it('sets tooltip list height based on content length when Virtuoso reports 0 height', () => {
|
||||
const uPlotInstance = createUPlotInstance(null);
|
||||
const content = [createTooltipContent(), createTooltipContent()];
|
||||
mockTotalListHeight = 0;
|
||||
|
||||
renderTooltip({ uPlotInstance, content });
|
||||
|
||||
const list = screen.getByTestId('uplot-tooltip-list');
|
||||
// Falls back to content length: 2 items * 38px = 76px
|
||||
expect(list).toHaveStyle({ height: '76px' });
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,277 @@
|
||||
import { PrecisionOption } from 'components/Graph/types';
|
||||
import { getToolTipValue } from 'components/Graph/yAxisConfig';
|
||||
import uPlot, { AlignedData, Series } from 'uplot';
|
||||
|
||||
import { TooltipContentItem } from '../../types';
|
||||
import {
|
||||
buildTooltipContent,
|
||||
FALLBACK_SERIES_COLOR,
|
||||
getTooltipBaseValue,
|
||||
resolveSeriesColor,
|
||||
} from '../utils';
|
||||
|
||||
jest.mock('components/Graph/yAxisConfig', () => ({
|
||||
getToolTipValue: jest.fn(),
|
||||
}));
|
||||
|
||||
const mockGetToolTipValue = getToolTipValue as jest.MockedFunction<
|
||||
typeof getToolTipValue
|
||||
>;
|
||||
|
||||
function createUPlotInstance(): uPlot {
|
||||
return ({
|
||||
data: [],
|
||||
cursor: { idx: 0 },
|
||||
} as unknown) as uPlot;
|
||||
}
|
||||
|
||||
describe('Tooltip utils', () => {
|
||||
describe('resolveSeriesColor', () => {
|
||||
it('returns string stroke when provided', () => {
|
||||
const u = createUPlotInstance();
|
||||
const stroke: Series.Stroke = '#ff0000';
|
||||
|
||||
const color = resolveSeriesColor(stroke, u, 1);
|
||||
|
||||
expect(color).toBe('#ff0000');
|
||||
});
|
||||
|
||||
it('returns result of stroke function when provided', () => {
|
||||
const u = createUPlotInstance();
|
||||
const strokeFn: Series.Stroke = (uInstance, seriesIdx): string =>
|
||||
`color-${seriesIdx}-${uInstance.cursor.idx}`;
|
||||
|
||||
const color = resolveSeriesColor(strokeFn, u, 2);
|
||||
|
||||
expect(color).toBe('color-2-0');
|
||||
});
|
||||
|
||||
it('returns fallback color when stroke is not provided', () => {
|
||||
const u = createUPlotInstance();
|
||||
|
||||
const color = resolveSeriesColor(undefined, u, 1);
|
||||
|
||||
expect(color).toBe(FALLBACK_SERIES_COLOR);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getTooltipBaseValue', () => {
|
||||
it('returns value from aligned data for non-stacked charts', () => {
|
||||
const data: AlignedData = [
|
||||
[0, 1],
|
||||
[10, 20],
|
||||
];
|
||||
|
||||
const result = getTooltipBaseValue({
|
||||
data,
|
||||
index: 1,
|
||||
dataIndex: 1,
|
||||
isStackedBarChart: false,
|
||||
});
|
||||
|
||||
expect(result).toBe(20);
|
||||
});
|
||||
|
||||
it('returns null when value is missing', () => {
|
||||
const data: AlignedData = [
|
||||
[0, 1],
|
||||
[10, null],
|
||||
];
|
||||
|
||||
const result = getTooltipBaseValue({
|
||||
data,
|
||||
index: 1,
|
||||
dataIndex: 1,
|
||||
});
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('subtracts next visible stacked value for stacked bar charts', () => {
|
||||
// data[1] and data[2] contain stacked values at dataIndex 1
|
||||
const data: AlignedData = [
|
||||
[0, 1],
|
||||
[30, 60], // series 1 stacked
|
||||
[10, 20], // series 2 stacked
|
||||
];
|
||||
|
||||
const series: Series[] = [
|
||||
{ label: 'x', show: true } as Series,
|
||||
{ label: 'A', show: true } as Series,
|
||||
{ label: 'B', show: true } as Series,
|
||||
];
|
||||
|
||||
const result = getTooltipBaseValue({
|
||||
data,
|
||||
index: 1,
|
||||
dataIndex: 1,
|
||||
isStackedBarChart: true,
|
||||
series,
|
||||
});
|
||||
|
||||
// 60 (stacked at series 1) - 20 (next visible stacked) = 40
|
||||
expect(result).toBe(40);
|
||||
});
|
||||
|
||||
it('skips hidden series when computing base value for stacked charts', () => {
|
||||
const data: AlignedData = [
|
||||
[0, 1],
|
||||
[30, 60], // series 1 stacked
|
||||
[10, 20], // series 2 stacked but hidden
|
||||
[5, 10], // series 3 stacked and visible
|
||||
];
|
||||
|
||||
const series: Series[] = [
|
||||
{ label: 'x', show: true } as Series,
|
||||
{ label: 'A', show: true } as Series,
|
||||
{ label: 'B', show: false } as Series,
|
||||
{ label: 'C', show: true } as Series,
|
||||
];
|
||||
|
||||
const result = getTooltipBaseValue({
|
||||
data,
|
||||
index: 1,
|
||||
dataIndex: 1,
|
||||
isStackedBarChart: true,
|
||||
series,
|
||||
});
|
||||
|
||||
// 60 (stacked at series 1) - 10 (next *visible* stacked, series 3) = 50
|
||||
expect(result).toBe(50);
|
||||
});
|
||||
|
||||
it('does not subtract when there is no next visible series', () => {
|
||||
const data: AlignedData = [
|
||||
[0, 1],
|
||||
[10, 20], // series 1
|
||||
[5, (null as unknown) as number], // series 2 missing
|
||||
];
|
||||
|
||||
const series: Series[] = [
|
||||
{ label: 'x', show: true } as Series,
|
||||
{ label: 'A', show: true } as Series,
|
||||
{ label: 'B', show: false } as Series,
|
||||
];
|
||||
|
||||
const result = getTooltipBaseValue({
|
||||
data,
|
||||
index: 1,
|
||||
dataIndex: 1,
|
||||
isStackedBarChart: true,
|
||||
series,
|
||||
});
|
||||
|
||||
expect(result).toBe(20);
|
||||
});
|
||||
});
|
||||
|
||||
describe('buildTooltipContent', () => {
|
||||
const yAxisUnit = 'ms';
|
||||
const decimalPrecision: PrecisionOption = 2;
|
||||
|
||||
beforeEach(() => {
|
||||
mockGetToolTipValue.mockReset();
|
||||
mockGetToolTipValue.mockImplementation(
|
||||
(value: string | number): string => `formatted-${value}`,
|
||||
);
|
||||
});
|
||||
|
||||
function createSeriesConfig(): Series[] {
|
||||
return [
|
||||
{ label: 'x', show: true } as Series,
|
||||
{ label: 'A', show: true, stroke: '#ff0000' } as Series,
|
||||
{
|
||||
label: 'B',
|
||||
show: true,
|
||||
stroke: (_u: uPlot, idx: number): string => `color-${idx}`,
|
||||
} as Series,
|
||||
{ label: 'C', show: false, stroke: '#00ff00' } as Series,
|
||||
];
|
||||
}
|
||||
|
||||
it('builds tooltip content with active series first', () => {
|
||||
const data: AlignedData = [[0], [10], [20], [30]];
|
||||
const series = createSeriesConfig();
|
||||
const dataIndexes = [null, 0, 0, 0];
|
||||
const u = createUPlotInstance();
|
||||
|
||||
const result = buildTooltipContent({
|
||||
data,
|
||||
series,
|
||||
dataIndexes,
|
||||
activeSeriesIndex: 2,
|
||||
uPlotInstance: u,
|
||||
yAxisUnit,
|
||||
decimalPrecision,
|
||||
});
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
// Active (series index 2) should come first
|
||||
expect(result[0]).toMatchObject<Partial<TooltipContentItem>>({
|
||||
label: 'B',
|
||||
value: 20,
|
||||
tooltipValue: 'formatted-20',
|
||||
color: 'color-2',
|
||||
isActive: true,
|
||||
});
|
||||
expect(result[1]).toMatchObject<Partial<TooltipContentItem>>({
|
||||
label: 'A',
|
||||
value: 10,
|
||||
tooltipValue: 'formatted-10',
|
||||
color: '#ff0000',
|
||||
isActive: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('skips series with null data index or non-finite values', () => {
|
||||
const data: AlignedData = [[0], [42], [Infinity]];
|
||||
const series: Series[] = [
|
||||
{ label: 'x', show: true } as Series,
|
||||
{ label: 'A', show: true, stroke: '#ff0000' } as Series,
|
||||
{ label: 'B', show: true, stroke: '#00ff00' } as Series,
|
||||
];
|
||||
const dataIndexes = [null, 0, null];
|
||||
const u = createUPlotInstance();
|
||||
|
||||
const result = buildTooltipContent({
|
||||
data,
|
||||
series,
|
||||
dataIndexes,
|
||||
activeSeriesIndex: 1,
|
||||
uPlotInstance: u,
|
||||
yAxisUnit,
|
||||
decimalPrecision,
|
||||
});
|
||||
|
||||
// Only the finite, non-null value from series A should be included
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].label).toBe('A');
|
||||
});
|
||||
|
||||
it('uses stacked base values when building content for stacked bar charts', () => {
|
||||
const data: AlignedData = [[0], [60], [30]];
|
||||
const series: Series[] = [
|
||||
{ label: 'x', show: true } as Series,
|
||||
{ label: 'A', show: true, stroke: '#ff0000' } as Series,
|
||||
{ label: 'B', show: true, stroke: '#00ff00' } as Series,
|
||||
];
|
||||
const dataIndexes = [null, 0, 0];
|
||||
const u = createUPlotInstance();
|
||||
|
||||
const result = buildTooltipContent({
|
||||
data,
|
||||
series,
|
||||
dataIndexes,
|
||||
activeSeriesIndex: 1,
|
||||
uPlotInstance: u,
|
||||
yAxisUnit,
|
||||
decimalPrecision,
|
||||
isStackedBarChart: true,
|
||||
});
|
||||
|
||||
// baseValue for series 1 at index 0 should be 60 - 30 (next visible) = 30
|
||||
expect(result[0].value).toBe(30);
|
||||
expect(result[1].value).toBe(30);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -4,7 +4,7 @@ import uPlot, { AlignedData, Series } from 'uplot';

import { TooltipContentItem } from '../types';

const FALLBACK_SERIES_COLOR = '#000000';
export const FALLBACK_SERIES_COLOR = '#000000';

export function resolveSeriesColor(
	stroke: Series.Stroke | undefined,
@@ -1,3 +1,5 @@
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';

/* eslint-disable sonarjs/no-duplicate-string */
export const traceDetailResponse = [
{
@@ -35,7 +37,7 @@ export const traceDetailResponse = [
'component',
'host.name',
'http.method',
'http.url',
SPAN_ATTRIBUTES.HTTP_URL,
'ip',
'http.status_code',
'opencensus.exporterversion',
@@ -84,7 +86,7 @@ export const traceDetailResponse = [
'signoz.collector.id',
'component',
'http.method',
'http.url',
SPAN_ATTRIBUTES.HTTP_URL,
'ip',
],
[
@@ -741,7 +743,7 @@ export const traceDetailResponse = [
'component',
'http.method',
'http.status_code',
'http.url',
SPAN_ATTRIBUTES.HTTP_URL,
'net/http.reused',
'net/http.was_idle',
'service.name',
@@ -833,7 +835,7 @@ export const traceDetailResponse = [
'opencensus.exporterversion',
'signoz.collector.id',
'host.name',
'http.url',
SPAN_ATTRIBUTES.HTTP_URL,
'net/http.reused',
'net/http.was_idle',
],
@@ -916,7 +918,7 @@ export const traceDetailResponse = [
'net/http.was_idle',
'component',
'host.name',
'http.url',
SPAN_ATTRIBUTES.HTTP_URL,
'ip',
'service.name',
'signoz.collector.id',
@@ -2,6 +2,7 @@
import { Dispatch, SetStateAction, useEffect, useState } from 'react';
import { getAttributesValues } from 'api/queryBuilder/getAttributesValues';
import { DATA_TYPE_VS_ATTRIBUTE_VALUES_KEY } from 'constants/queryBuilder';
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import {
	BaseAutocompleteData,
	DataTypes,
@@ -31,7 +32,7 @@ export const AllTraceFilterKeyValue: Record<string, string> = {
	httpRoute: 'HTTP Route',
	'http.route': 'HTTP Route',
	httpUrl: 'HTTP URL',
	'http.url': 'HTTP URL',
	[SPAN_ATTRIBUTES.HTTP_URL]: 'HTTP URL',
	traceID: 'Trace ID',
	trace_id: 'Trace ID',
} as const;
@@ -1,53 +0,0 @@
export interface HostsProps {
	name: string;
	is_default: boolean;
}

export interface RegionProps {
	id: string;
	name: string;
	category: string;
	dns: string;
	created_at: string;
	updated_at: string;
}

export interface ClusterProps {
	id: string;
	name: string;
	cloud_account_id: string;
	cloud_region: string;
	address: string;
	region: RegionProps;
}

export interface DeploymentData {
	id: string;
	name: string;
	email: string;
	state: string;
	tier: string;
	user: string;
	password: string;
	created_at: string;
	updated_at: string;
	cluster_id: string;
	hosts: HostsProps[];
	cluster: ClusterProps;
}

export interface DeploymentsDataProps {
	status: string;
	data: DeploymentData;
}

export type PayloadProps = {
	status: string;
	data: string;
};

export interface UpdateCustomDomainProps {
	data: {
		name: string;
	};
}
@@ -1,11 +0,0 @@
export interface UpdateProfileProps {
	reasons_for_interest_in_signoz: string[];
	uses_otel: boolean;
	has_existing_observability_tool: boolean;
	existing_observability_tool: string;
	logs_scale_per_day_in_gb: number;
	number_of_services: number;
	number_of_hosts: number;
	where_did_you_discover_signoz: string;
	timeline_for_migrating_to_signoz: string;
}
30
pkg/apiserver/signozapiserver/authz.go
Normal file
@@ -0,0 +1,30 @@
package signozapiserver

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/http/handler"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/gorilla/mux"
)

func (provider *provider) addAuthzRoutes(router *mux.Router) error {
	if err := router.Handle("/api/v1/authz/check", handler.New(provider.authzHandler.Check, handler.OpenAPIDef{
		ID: "AuthzCheck",
		Tags: []string{"authz"},
		Summary: "Check permissions",
		Description: "Checks if the authenticated user has permissions for given transactions",
		Request: make([]*authtypes.Transaction, 0),
		RequestContentType: "",
		Response: make([]*authtypes.GettableTransaction, 0),
		ResponseContentType: "application/json",
		SuccessStatusCode: http.StatusOK,
		ErrorStatusCodes: []int{},
		Deprecated: false,
		SecuritySchemes: nil,
	})).Methods(http.MethodPost).GetError(); err != nil {
		return err
	}

	return nil
}
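Note (illustrative, not part of the diff): a minimal Go sketch of how a client might call the new POST /api/v1/authz/check route above. The JSON field names in the payload (relation, object.resource, object.selector), the host, and the SIGNOZ-API-KEY header value are assumptions for illustration; the exact wire shape of authtypes.Transaction is not shown in this diff.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Hypothetical transaction: "can the caller read dashboards?"
	body, _ := json.Marshal([]map[string]any{
		{
			"relation": "read", // assumed relation name
			"object": map[string]any{ // assumed object shape
				"resource": map[string]any{"name": "dashboard"},
				"selector": "*",
			},
		},
	})

	req, _ := http.NewRequest(http.MethodPost, "https://signoz.example.com/api/v1/authz/check", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("SIGNOZ-API-KEY", "<api-key>") // placeholder credential

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status) // 200 with the per-transaction authorization results on success
}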
@@ -4,6 +4,7 @@ import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/http/handler"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/promotetypes"
	"github.com/gorilla/mux"
)
@@ -20,6 +21,7 @@ func (provider *provider) addPromoteRoutes(router *mux.Router) error {
		ResponseContentType: "",
		SuccessStatusCode: http.StatusCreated,
		ErrorStatusCodes: []int{http.StatusBadRequest},
		SecuritySchemes: newSecuritySchemes(types.RoleEditor),
	})).Methods(http.MethodPost).GetError(); err != nil {
		return err
	}
@@ -35,6 +37,7 @@ func (provider *provider) addPromoteRoutes(router *mux.Router) error {
		ResponseContentType: "",
		SuccessStatusCode: http.StatusOK,
		ErrorStatusCodes: []int{http.StatusBadRequest},
		SecuritySchemes: newSecuritySchemes(types.RoleViewer),
	})).Methods(http.MethodGet).GetError(); err != nil {
		return err
	}
@@ -20,8 +20,10 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/promote"
|
||||
"github.com/SigNoz/signoz/pkg/modules/session"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
|
||||
"github.com/SigNoz/signoz/pkg/zeus"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
@@ -44,6 +46,8 @@ type provider struct {
|
||||
gatewayHandler gateway.Handler
|
||||
fieldsHandler fields.Handler
|
||||
authzHandler authz.Handler
|
||||
zeusHandler zeus.Handler
|
||||
querierHandler querier.Handler
|
||||
}
|
||||
|
||||
func NewFactory(
|
||||
@@ -63,6 +67,8 @@ func NewFactory(
|
||||
gatewayHandler gateway.Handler,
|
||||
fieldsHandler fields.Handler,
|
||||
authzHandler authz.Handler,
|
||||
zeusHandler zeus.Handler,
|
||||
querierHandler querier.Handler,
|
||||
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
|
||||
return newProvider(
|
||||
@@ -85,6 +91,8 @@ func NewFactory(
|
||||
gatewayHandler,
|
||||
fieldsHandler,
|
||||
authzHandler,
|
||||
zeusHandler,
|
||||
querierHandler,
|
||||
)
|
||||
})
|
||||
}
|
||||
@@ -109,6 +117,8 @@ func newProvider(
|
||||
gatewayHandler gateway.Handler,
|
||||
fieldsHandler fields.Handler,
|
||||
authzHandler authz.Handler,
|
||||
zeusHandler zeus.Handler,
|
||||
querierHandler querier.Handler,
|
||||
) (apiserver.APIServer, error) {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
|
||||
router := mux.NewRouter().UseEncodedPath()
|
||||
@@ -131,6 +141,8 @@ func newProvider(
|
||||
gatewayHandler: gatewayHandler,
|
||||
fieldsHandler: fieldsHandler,
|
||||
authzHandler: authzHandler,
|
||||
zeusHandler: zeusHandler,
|
||||
querierHandler: querierHandler,
|
||||
}
|
||||
|
||||
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
|
||||
@@ -195,10 +207,22 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := provider.addAuthzRoutes(router); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := provider.addFieldsRoutes(router); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := provider.addZeusRoutes(router); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := provider.addQuerierRoutes(router); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
471
pkg/apiserver/signozapiserver/querier.go
Normal file
@@ -0,0 +1,471 @@
|
||||
package signozapiserver
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
func (provider *provider) addQuerierRoutes(router *mux.Router) error {
|
||||
if err := router.Handle("/api/v5/query_range", handler.New(provider.authZ.ViewAccess(provider.querierHandler.QueryRange), handler.OpenAPIDef{
|
||||
ID: "QueryRangeV5",
|
||||
Tags: []string{"querier"},
|
||||
Summary: "Query range",
|
||||
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
|
||||
Request: new(qbtypes.QueryRangeRequest),
|
||||
RequestContentType: "application/json",
|
||||
RequestExamples: []handler.OpenAPIExample{
|
||||
{
|
||||
Name: "traces_time_series",
|
||||
Summary: "Time series: count spans grouped by service",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "traces",
|
||||
"aggregations": []any{
|
||||
map[string]any{"expression": "count()", "alias": "span_count"},
|
||||
},
|
||||
"stepInterval": "60s",
|
||||
"filter": map[string]any{"expression": "service.name = 'frontend'"},
|
||||
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
|
||||
"order": []any{map[string]any{"key": map[string]any{"name": "span_count"}, "direction": "desc"}},
|
||||
"limit": 10,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "logs_time_series",
|
||||
Summary: "Time series: count error logs grouped by service",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "logs",
|
||||
"aggregations": []any{
|
||||
map[string]any{"expression": "count()", "alias": "log_count"},
|
||||
},
|
||||
"stepInterval": "60s",
|
||||
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
|
||||
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
|
||||
"order": []any{map[string]any{"key": map[string]any{"name": "log_count"}, "direction": "desc"}},
|
||||
"limit": 10,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "metrics_gauge_time_series",
|
||||
Summary: "Time series: latest gauge value averaged across series",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "metrics",
|
||||
"aggregations": []any{
|
||||
map[string]any{"metricName": "system.cpu.utilization", "timeAggregation": "latest", "spaceAggregation": "avg"},
|
||||
},
|
||||
"stepInterval": "60s",
|
||||
"groupBy": []any{map[string]any{"name": "host.name", "fieldContext": "resource"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "metrics_rate_time_series",
|
||||
Summary: "Time series: rate of cumulative counter",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "metrics",
|
||||
"aggregations": []any{
|
||||
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum"},
|
||||
},
|
||||
"stepInterval": 120,
|
||||
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "metrics_histogram_time_series",
|
||||
Summary: "Time series: p99 latency from histogram",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "metrics",
|
||||
"aggregations": []any{
|
||||
map[string]any{"metricName": "http.server.duration.bucket", "spaceAggregation": "p99"},
|
||||
},
|
||||
"stepInterval": "60s",
|
||||
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "logs_raw",
|
||||
Summary: "Raw: fetch raw log records",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "raw",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "logs",
|
||||
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
|
||||
"selectFields": []any{
|
||||
map[string]any{"name": "body", "fieldContext": "log"},
|
||||
map[string]any{"name": "service.name", "fieldContext": "resource"},
|
||||
},
|
||||
"order": []any{
|
||||
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "log"}, "direction": "desc"},
|
||||
map[string]any{"key": map[string]any{"name": "id"}, "direction": "desc"},
|
||||
},
|
||||
"limit": 50,
|
||||
"offset": 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "traces_raw",
|
||||
Summary: "Raw: fetch raw span records",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "raw",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "traces",
|
||||
"filter": map[string]any{"expression": "service.name = 'frontend' AND has_error = true"},
|
||||
"selectFields": []any{
|
||||
map[string]any{"name": "name", "fieldContext": "span"},
|
||||
map[string]any{"name": "duration_nano", "fieldContext": "span"},
|
||||
},
|
||||
"order": []any{
|
||||
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "span"}, "direction": "desc"},
|
||||
},
|
||||
"limit": 100,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "traces_scalar",
|
||||
Summary: "Scalar: total span count",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "scalar",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "traces",
|
||||
"aggregations": []any{
|
||||
map[string]any{"expression": "count()", "alias": "span_count"},
|
||||
},
|
||||
"filter": map[string]any{"expression": "service.name = 'frontend'"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "logs_scalar",
|
||||
Summary: "Scalar: total error log count",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "scalar",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "logs",
|
||||
"aggregations": []any{
|
||||
map[string]any{"expression": "count()", "alias": "error_count"},
|
||||
},
|
||||
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "metrics_scalar",
|
||||
Summary: "Scalar: single reduced metric value",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "scalar",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "metrics",
|
||||
"aggregations": []any{
|
||||
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum", "reduceTo": "sum"},
|
||||
},
|
||||
"stepInterval": "60s",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "formula",
|
||||
Summary: "Formula: error rate from two trace queries",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "traces",
|
||||
"aggregations": []any{map[string]any{"expression": "countIf(has_error = true)"}},
|
||||
"stepInterval": "60s",
|
||||
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
|
||||
},
|
||||
},
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "B",
|
||||
"signal": "traces",
|
||||
"aggregations": []any{map[string]any{"expression": "count()"}},
|
||||
"stepInterval": "60s",
|
||||
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
|
||||
},
|
||||
},
|
||||
map[string]any{
|
||||
"type": "builder_formula",
|
||||
"spec": map[string]any{
|
||||
"name": "error_rate",
|
||||
"expression": "A / B * 100",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "promql",
|
||||
Summary: "PromQL: request rate with UTF-8 metric name",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "promql",
|
||||
"spec": map[string]any{
|
||||
"name": "request_rate",
|
||||
"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
|
||||
"step": 60,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "clickhouse_sql_traces_time_series",
|
||||
Summary: "ClickHouse SQL: traces time series with resource filter",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "clickhouse_sql",
|
||||
"spec": map[string]any{
|
||||
"name": "span_rate",
|
||||
"query": "WITH __resource_filter AS (" +
|
||||
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
|
||||
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
|
||||
" ) SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value" +
|
||||
" FROM signoz_traces.distributed_signoz_index_v3" +
|
||||
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
|
||||
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
|
||||
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
|
||||
" GROUP BY ts ORDER BY ts",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "clickhouse_sql_logs_raw",
|
||||
Summary: "ClickHouse SQL: raw logs with resource filter",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "raw",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "clickhouse_sql",
|
||||
"spec": map[string]any{
|
||||
"name": "recent_errors",
|
||||
"query": "WITH __resource_filter AS (" +
|
||||
" SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource" +
|
||||
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
|
||||
" ) SELECT timestamp, body" +
|
||||
" FROM signoz_logs.distributed_logs_v2" +
|
||||
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
|
||||
" AND timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano" +
|
||||
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
|
||||
" AND severity_text = 'ERROR'" +
|
||||
" ORDER BY timestamp DESC LIMIT 100",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "clickhouse_sql_traces_scalar",
|
||||
Summary: "ClickHouse SQL: scalar aggregate with resource filter",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "scalar",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "clickhouse_sql",
|
||||
"spec": map[string]any{
|
||||
"name": "total_spans",
|
||||
"query": "WITH __resource_filter AS (" +
|
||||
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
|
||||
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
|
||||
" ) SELECT count() AS value" +
|
||||
" FROM signoz_traces.distributed_signoz_index_v3" +
|
||||
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
|
||||
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
|
||||
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Response: new(qbtypes.QueryRangeResponse),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest},
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
|
||||
})).Methods(http.MethodPost).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v5/substitute_vars", handler.New(provider.authZ.ViewAccess(provider.querierHandler.ReplaceVariables), handler.OpenAPIDef{
|
||||
ID: "ReplaceVariables",
|
||||
Tags: []string{"querier"},
|
||||
Summary: "Replace variables",
|
||||
Description: "Replace variables in a query",
|
||||
Request: new(qbtypes.QueryRangeRequest),
|
||||
RequestContentType: "application/json",
|
||||
Response: new(qbtypes.QueryRangeRequest),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest},
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
|
||||
})).Methods(http.MethodPost).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
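Note (illustrative, not part of the diff): a minimal Go sketch that posts the "traces_scalar" example payload from the route definition above to /api/v5/query_range. The host and SIGNOZ-API-KEY value are placeholders; authentication follows whatever scheme the deployment uses.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Same shape as the traces_scalar example registered in the OpenAPI definition above.
	payload := map[string]any{
		"schemaVersion": "v1",
		"start":         1640995200000,
		"end":           1640998800000,
		"requestType":   "scalar",
		"compositeQuery": map[string]any{
			"queries": []any{
				map[string]any{
					"type": "builder_query",
					"spec": map[string]any{
						"name":         "A",
						"signal":       "traces",
						"aggregations": []any{map[string]any{"expression": "count()", "alias": "span_count"}},
						"filter":       map[string]any{"expression": "service.name = 'frontend'"},
					},
				},
			},
		},
	}

	body, _ := json.Marshal(payload)
	req, _ := http.NewRequest(http.MethodPost, "https://signoz.example.com/api/v5/query_range", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("SIGNOZ-API-KEY", "<api-key>") // placeholder credential

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	out, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(out))
}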
@@ -5,6 +5,7 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
@@ -15,12 +16,12 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
|
||||
Tags: []string{"role"},
|
||||
Summary: "Create role",
|
||||
Description: "This endpoint creates a role",
|
||||
Request: nil,
|
||||
Request: new(roletypes.PostableRole),
|
||||
RequestContentType: "",
|
||||
Response: new(types.Identifiable),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusCreated,
|
||||
ErrorStatusCodes: []int{},
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusConflict, http.StatusNotImplemented, http.StatusUnavailableForLegalReasons},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodPost).GetError(); err != nil {
|
||||
@@ -44,6 +45,23 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/roles/resources", handler.New(provider.authZ.AdminAccess(provider.authzHandler.GetResources), handler.OpenAPIDef{
|
||||
ID: "GetResources",
|
||||
Tags: []string{"role"},
|
||||
Summary: "Get resources",
|
||||
Description: "Gets all the available resources for role assignment",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: new(roletypes.GettableResources),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodGet).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Get), handler.OpenAPIDef{
|
||||
ID: "GetRole",
|
||||
Tags: []string{"role"},
|
||||
@@ -61,17 +79,51 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/roles/{id}/relation/{relation}/objects", handler.New(provider.authZ.AdminAccess(provider.authzHandler.GetObjects), handler.OpenAPIDef{
|
||||
ID: "GetObjects",
|
||||
Tags: []string{"role"},
|
||||
Summary: "Get objects for a role by relation",
|
||||
Description: "Gets all objects connected to the specified role via a given relation type",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: make([]*authtypes.Object, 0),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{http.StatusNotFound, http.StatusNotImplemented, http.StatusUnavailableForLegalReasons},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodGet).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Patch), handler.OpenAPIDef{
|
||||
ID: "PatchRole",
|
||||
Tags: []string{"role"},
|
||||
Summary: "Patch role",
|
||||
Description: "This endpoint patches a role",
|
||||
Request: nil,
|
||||
Request: new(roletypes.PatchableRole),
|
||||
RequestContentType: "",
|
||||
Response: nil,
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{},
|
||||
ErrorStatusCodes: []int{http.StatusNotFound, http.StatusNotImplemented, http.StatusUnavailableForLegalReasons},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodPatch).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/roles/{id}/relation/{relation}/objects", handler.New(provider.authZ.AdminAccess(provider.authzHandler.PatchObjects), handler.OpenAPIDef{
|
||||
ID: "PatchObjects",
|
||||
Tags: []string{"role"},
|
||||
Summary: "Patch objects for a role by relation",
|
||||
Description: "Patches the objects connected to the specified role via a given relation type",
|
||||
Request: new(roletypes.PatchableObjects),
|
||||
RequestContentType: "",
|
||||
Response: nil,
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{http.StatusNotFound, http.StatusBadRequest, http.StatusNotImplemented, http.StatusUnavailableForLegalReasons},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodPatch).GetError(); err != nil {
|
||||
@@ -88,7 +140,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
|
||||
Response: nil,
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{},
|
||||
ErrorStatusCodes: []int{http.StatusNotFound, http.StatusNotImplemented, http.StatusUnavailableForLegalReasons},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodDelete).GetError(); err != nil {
|
||||
|
||||
65
pkg/apiserver/signozapiserver/zeus.go
Normal file
@@ -0,0 +1,65 @@
|
||||
package signozapiserver
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/zeustypes"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
func (provider *provider) addZeusRoutes(router *mux.Router) error {
|
||||
if err := router.Handle("/api/v2/zeus/profiles", handler.New(provider.authZ.AdminAccess(provider.zeusHandler.PutProfile), handler.OpenAPIDef{
|
||||
ID: "PutProfile",
|
||||
Tags: []string{"zeus"},
|
||||
Summary: "Put profile in Zeus for a deployment.",
|
||||
Description: "This endpoint saves the profile of a deployment to zeus.",
|
||||
Request: new(zeustypes.PostableProfile),
|
||||
RequestContentType: "application/json",
|
||||
Response: nil,
|
||||
ResponseContentType: "",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusForbidden, http.StatusNotFound, http.StatusConflict},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodPut).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v2/zeus/hosts", handler.New(provider.authZ.AdminAccess(provider.zeusHandler.GetHosts), handler.OpenAPIDef{
|
||||
ID: "GetHosts",
|
||||
Tags: []string{"zeus"},
|
||||
Summary: "Get host info from Zeus.",
|
||||
Description: "This endpoint gets the host info from zeus.",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: new(zeustypes.GettableHost),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusForbidden, http.StatusNotFound},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodGet).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v2/zeus/hosts", handler.New(provider.authZ.AdminAccess(provider.zeusHandler.PutHost), handler.OpenAPIDef{
|
||||
ID: "PutHost",
|
||||
Tags: []string{"zeus"},
|
||||
Summary: "Put host in Zeus for a deployment.",
|
||||
Description: "This endpoint saves the host of a deployment to zeus.",
|
||||
Request: new(zeustypes.PostableHost),
|
||||
RequestContentType: "application/json",
|
||||
Response: nil,
|
||||
ResponseContentType: "",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusForbidden, http.StatusNotFound, http.StatusConflict},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodPut).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -14,17 +14,14 @@ import (
type AuthZ interface {
	factory.Service

	// Check returns error when the upstream authorization server is unavailable or the subject (s) doesn't have relation (r) on object (o).
	Check(context.Context, *openfgav1.TupleKey) error

	// CheckWithTupleCreation takes upon the responsibility for generating the tuples alongside everything Check does.
	CheckWithTupleCreation(context.Context, authtypes.Claims, valuer.UUID, authtypes.Relation, authtypes.Typeable, []authtypes.Selector, []authtypes.Selector) error

	// CheckWithTupleCreationWithoutClaims checks permissions for anonymous users.
	CheckWithTupleCreationWithoutClaims(context.Context, valuer.UUID, authtypes.Relation, authtypes.Typeable, []authtypes.Selector, []authtypes.Selector) error

	// Batch Check returns error when the upstream authorization server is unavailable or for all the tuples of subject (s) doesn't have relation (r) on object (o).
	BatchCheck(context.Context, []*openfgav1.TupleKey) error
	// BatchCheck accepts a map of ID → tuple and returns a map of ID → authorization result.
	BatchCheck(context.Context, map[string]*openfgav1.TupleKey) (map[string]*authtypes.TupleKeyAuthorization, error)

	// Write accepts the insertion tuples and the deletion tuples.
	Write(context.Context, []*openfgav1.TupleKey, []*openfgav1.TupleKey) error
@@ -102,5 +99,7 @@ type Handler interface {

	PatchObjects(http.ResponseWriter, *http.Request)

	Check(http.ResponseWriter, *http.Request)

	Delete(http.ResponseWriter, *http.Request)
}
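Note (illustrative, not part of the diff): a short Go sketch of how a caller might use the new map-based BatchCheck, where the map key is a caller-chosen correlation ID and each result reports whether that tuple was authorized. The import path for the AuthZ interface (pkg/authz), the IDs, and the tuple values are assumptions for illustration.

package authzexample

import (
	"context"

	"github.com/SigNoz/signoz/pkg/authz" // assumed location of the AuthZ interface shown above
	openfgav1 "github.com/openfga/api/proto/openfga/v1"
)

// allAllowed reports whether every check in the batch is authorized.
func allAllowed(ctx context.Context, checker authz.AuthZ) (bool, error) {
	// Placeholder tuples; the keys are correlation IDs chosen by the caller.
	tuples := map[string]*openfgav1.TupleKey{
		"txn-1": {User: "user:alice", Relation: "read", Object: "dashboard:orders"},
		"txn-2": {User: "user:alice", Relation: "update", Object: "dashboard:orders"},
	}

	results, err := checker.BatchCheck(ctx, tuples)
	if err != nil {
		return false, err // upstream authorization server unavailable
	}

	for _, result := range results {
		if !result.Authorized {
			return false, nil // the map key identifies exactly which check failed
		}
	}
	return true, nil
}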
@@ -26,7 +26,7 @@ func (store *store) Create(ctx context.Context, role *roletypes.StorableRole) er
		Model(role).
		Exec(ctx)
	if err != nil {
		return store.sqlstore.WrapAlreadyExistsErrf(err, errors.CodeAlreadyExists, "role with id: %s already exists", role.ID)
		return store.sqlstore.WrapAlreadyExistsErrf(err, errors.CodeAlreadyExists, "role with name: %s already exists", role.Name)
	}

	return nil
@@ -48,11 +48,7 @@ func (provider *provider) Stop(ctx context.Context) error {
	return provider.server.Stop(ctx)
}

func (provider *provider) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
	return provider.server.Check(ctx, tupleReq)
}

func (provider *provider) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
func (provider *provider) BatchCheck(ctx context.Context, tupleReq map[string]*openfgav1.TupleKey) (map[string]*authtypes.TupleKeyAuthorization, error) {
	return provider.server.BatchCheck(ctx, tupleReq)
}

@@ -181,10 +177,6 @@ func (provider *provider) CreateManagedRoles(ctx context.Context, _ valuer.UUID,
	return nil
}

func (provider *provider) SetManagedRoleTransactions(context.Context, valuer.UUID) error {
	return nil
}

func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
	return provider.Grant(ctx, orgID, roletypes.SigNozAdminRoleName, authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil))
}
@@ -90,42 +90,18 @@ func (server *Server) Stop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (server *Server) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
|
||||
func (server *Server) BatchCheck(ctx context.Context, tupleReq map[string]*openfgav1.TupleKey) (map[string]*authtypes.TupleKeyAuthorization, error) {
|
||||
storeID, modelID := server.getStoreIDandModelID()
|
||||
checkResponse, err := server.openfgaServer.Check(
|
||||
ctx,
|
||||
&openfgav1.CheckRequest{
|
||||
StoreId: storeID,
|
||||
AuthorizationModelId: modelID,
|
||||
TupleKey: &openfgav1.CheckRequestTupleKey{
|
||||
User: tupleReq.User,
|
||||
Relation: tupleReq.Relation,
|
||||
Object: tupleReq.Object,
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
if !checkResponse.Allowed {
|
||||
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subject %s cannot %s object %s", tupleReq.User, tupleReq.Relation, tupleReq.Object)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (server *Server) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
|
||||
storeID, modelID := server.getStoreIDandModelID()
|
||||
batchCheckItems := make([]*openfgav1.BatchCheckItem, 0)
|
||||
for idx, tuple := range tupleReq {
|
||||
batchCheckItems := make([]*openfgav1.BatchCheckItem, 0, len(tupleReq))
|
||||
for id, tuple := range tupleReq {
|
||||
batchCheckItems = append(batchCheckItems, &openfgav1.BatchCheckItem{
|
||||
TupleKey: &openfgav1.CheckRequestTupleKey{
|
||||
User: tuple.User,
|
||||
Relation: tuple.Relation,
|
||||
Object: tuple.Object,
|
||||
},
|
||||
// the batch check response is map[string] keyed by correlationID.
|
||||
CorrelationId: strconv.Itoa(idx),
|
||||
// Use transaction ID as correlation ID for deterministic mapping
|
||||
CorrelationId: id,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -137,17 +113,18 @@ func (server *Server) BatchCheck(ctx context.Context, tupleReq []*openfgav1.Tupl
|
||||
Checks: batchCheckItems,
|
||||
})
|
||||
if err != nil {
|
||||
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
|
||||
return nil, errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
for _, checkResponse := range checkResponse.Result {
|
||||
if checkResponse.GetAllowed() {
|
||||
return nil
|
||||
response := make(map[string]*authtypes.TupleKeyAuthorization, len(tupleReq))
|
||||
for id, tuple := range tupleReq {
|
||||
response[id] = &authtypes.TupleKeyAuthorization{
|
||||
Tuple: tuple,
|
||||
Authorized: checkResponse.Result[id].GetAllowed(),
|
||||
}
|
||||
}
|
||||
|
||||
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
||||
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
|
||||
@@ -156,17 +133,29 @@ func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtyp
|
||||
return err
|
||||
}
|
||||
|
||||
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
|
||||
tupleSlice, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = server.BatchCheck(ctx, tuples)
|
||||
// Convert slice to map with generated IDs for internal use
|
||||
tuples := make(map[string]*openfgav1.TupleKey, len(tupleSlice))
|
||||
for idx, tuple := range tupleSlice {
|
||||
tuples[strconv.Itoa(idx)] = tuple
|
||||
}
|
||||
|
||||
response, err := server.BatchCheck(ctx, tuples)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
for _, resp := range response {
|
||||
if resp.Authorized {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
|
||||
}
|
||||
|
||||
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
|
||||
@@ -175,17 +164,29 @@ func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, o
|
||||
return err
|
||||
}
|
||||
|
||||
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
|
||||
tupleSlice, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = server.BatchCheck(ctx, tuples)
|
||||
// Convert slice to map with generated IDs for internal use
|
||||
tuples := make(map[string]*openfgav1.TupleKey, len(tupleSlice))
|
||||
for idx, tuple := range tupleSlice {
|
||||
tuples[strconv.Itoa(idx)] = tuple
|
||||
}
|
||||
|
||||
response, err := server.BatchCheck(ctx, tuples)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
for _, resp := range response {
|
||||
if resp.Authorized {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
|
||||
}
|
||||
|
||||
func (server *Server) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
|
||||
|
||||
@@ -7,6 +7,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/binding"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
@@ -35,13 +36,14 @@ func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
err = handler.authz.Create(ctx, valuer.MustNewUUID(claims.OrgID), roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID)))
|
||||
role := roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID))
|
||||
err = handler.authz.Create(ctx, valuer.MustNewUUID(claims.OrgID), role)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusCreated, nil)
|
||||
render.Success(rw, http.StatusCreated, types.Identifiable{ID: role.ID})
|
||||
}
|
||||
|
||||
func (handler *handler) Get(rw http.ResponseWriter, r *http.Request) {
|
||||
@@ -112,17 +114,9 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
func (handler *handler) GetResources(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
resources := handler.authz.GetResources(ctx)
|
||||
resources := handler.authz.GetResources(r.Context())
|
||||
|
||||
var resourceRelations = struct {
|
||||
Resources []*authtypes.Resource `json:"resources"`
|
||||
Relations map[authtypes.Type][]authtypes.Relation `json:"relations"`
|
||||
}{
|
||||
Resources: resources,
|
||||
Relations: authtypes.TypeableRelations,
|
||||
}
|
||||
render.Success(rw, http.StatusOK, resourceRelations)
|
||||
render.Success(rw, http.StatusOK, roletypes.NewGettableResources(resources))
|
||||
}
|
||||
|
||||
func (handler *handler) List(rw http.ResponseWriter, r *http.Request) {
|
||||
@@ -227,7 +221,7 @@ func (handler *handler) PatchObjects(rw http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusAccepted, nil)
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func (handler *handler) Delete(rw http.ResponseWriter, r *http.Request) {
|
||||
@@ -252,3 +246,39 @@ func (handler *handler) Delete(rw http.ResponseWriter, r *http.Request) {
|
||||
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func (handler *handler) Check(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
transactions := make([]*authtypes.Transaction, 0)
|
||||
if err := binding.JSON.BindBody(r.Body, &transactions); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID := valuer.MustNewUUID(claims.OrgID)
|
||||
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
tuples, err := authtypes.NewTuplesFromTransactions(transactions, subject, orgID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
results, err := handler.authz.BatchCheck(ctx, tuples)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, authtypes.NewGettableTransaction(transactions, results))
|
||||
}
|
||||
|
||||
@@ -21,18 +21,17 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/variables"
|
||||
)
|
||||
|
||||
type API struct {
|
||||
type handler struct {
|
||||
set factory.ProviderSettings
|
||||
analytics analytics.Analytics
|
||||
querier Querier
|
||||
}
|
||||
|
||||
func NewAPI(set factory.ProviderSettings, querier Querier, analytics analytics.Analytics) *API {
|
||||
return &API{set: set, querier: querier, analytics: analytics}
|
||||
func NewHandler(set factory.ProviderSettings, querier Querier, analytics analytics.Analytics) Handler {
|
||||
return &handler{set: set, querier: querier, analytics: analytics}
|
||||
}
|
||||
|
||||
func (a *API) QueryRange(rw http.ResponseWriter, req *http.Request) {
|
||||
|
||||
func (handler *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
|
||||
ctx := req.Context()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
@@ -53,7 +52,7 @@ func (a *API) QueryRange(rw http.ResponseWriter, req *http.Request) {
|
||||
|
||||
queryJSON, _ := json.Marshal(queryRangeRequest)

a.set.Logger.ErrorContext(ctx, "panic in QueryRange",
handler.set.Logger.ErrorContext(ctx, "panic in QueryRange",
"error", r,
"user", claims.UserID,
"payload", string(queryJSON),
@@ -79,17 +78,17 @@ func (a *API) QueryRange(rw http.ResponseWriter, req *http.Request) {
return
}

queryRangeResponse, err := a.querier.QueryRange(ctx, orgID, &queryRangeRequest)
queryRangeResponse, err := handler.querier.QueryRange(ctx, orgID, &queryRangeRequest)
if err != nil {
render.Error(rw, err)
return
}

a.logEvent(req.Context(), req.Header.Get("Referer"), queryRangeResponse.QBEvent)
handler.logEvent(req.Context(), req.Header.Get("Referer"), queryRangeResponse.QBEvent)

render.Success(rw, http.StatusOK, queryRangeResponse)
}
func (a *API) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
func (handler *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()

// get the param from url and add it to body
@@ -153,7 +152,7 @@ func (a *API) QueryRawStream(rw http.ResponseWriter, req *http.Request) {

queryJSON, _ := json.Marshal(queryRangeRequest)

a.set.Logger.ErrorContext(ctx, "panic in QueryRawStream",
handler.set.Logger.ErrorContext(ctx, "panic in QueryRawStream",
"error", r,
"user", claims.UserID,
"payload", string(queryJSON),
@@ -193,7 +192,7 @@ func (a *API) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
flusher.Flush()

client := &qbtypes.RawStream{Name: req.RemoteAddr, Logs: make(chan *qbtypes.RawRow, 1000), Done: make(chan *bool), Error: make(chan error)}
go a.querier.QueryRawStream(ctx, orgID, &queryRangeRequest, client)
go handler.querier.QueryRawStream(ctx, orgID, &queryRangeRequest, client)

for {
select {
@@ -220,7 +219,7 @@ func (a *API) QueryRawStream(rw http.ResponseWriter, req *http.Request) {

// TODO(srikanthccv): everything done here can be done on frontend as well
// For the time being I am adding a helper function
func (a *API) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
func (handler *handler) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {

var queryRangeRequest qbtypes.QueryRangeRequest
if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
@@ -272,7 +271,7 @@ func (a *API) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
render.Success(rw, http.StatusOK, queryRangeRequest)
}

func (a *API) logEvent(ctx context.Context, referrer string, event *qbtypes.QBEvent) {
func (handler *handler) logEvent(ctx context.Context, referrer string, event *qbtypes.QBEvent) {
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
return
@@ -305,8 +304,9 @@ func (a *API) logEvent(ctx context.Context, referrer string, event *qbtypes.QBEv
}

if !event.HasData {
a.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Empty", properties)
handler.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Empty", properties)
return
}
a.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties)

handler.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties)
}

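Note: the for/select loop in QueryRawStream is truncated in the hunk above. As a rough sketch of how such a consumer could drain the RawStream channels (the field names come from the qbtypes.RawStream literal above; the JSON encoding and flushing details are assumptions, not the handler's actual body):

package querierexample

import (
	"encoding/json"
	"net/http"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

// consumeRawStream drains a RawStream until the producer signals Done or
// reports an error, encoding each row to the response as it arrives.
func consumeRawStream(rw http.ResponseWriter, flusher http.Flusher, client *qbtypes.RawStream) error {
	enc := json.NewEncoder(rw)
	for {
		select {
		case row := <-client.Logs:
			if err := enc.Encode(row); err != nil {
				return err
			}
			flusher.Flush()
		case err := <-client.Error:
			return err
		case <-client.Done:
			return nil
		}
	}
}
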
@@ -2,6 +2,7 @@ package querier

import (
"context"
"net/http"

qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -20,3 +21,9 @@ type BucketCache interface {
// store fresh buckets for future hits
Put(ctx context.Context, orgID valuer.UUID, q qbtypes.Query, step qbtypes.Step, fresh *qbtypes.Result)
}

type Handler interface {
QueryRange(rw http.ResponseWriter, req *http.Request)
QueryRawStream(rw http.ResponseWriter, req *http.Request)
ReplaceVariables(rw http.ResponseWriter, req *http.Request)
}

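Note: any type with these three methods satisfies querier.Handler and can be injected in place of the default handler. A minimal illustrative sketch (the wrapper name, constructor, and logging are hypothetical, not code from this change) of a decorator that delegates to an inner Handler:

package querierexample

import (
	"log/slog"
	"net/http"

	"github.com/SigNoz/signoz/pkg/querier"
)

// loggingHandler is a hypothetical querier.Handler that wraps another
// Handler, logging each QueryRange call before delegating.
type loggingHandler struct {
	next   querier.Handler
	logger *slog.Logger
}

func NewLoggingHandler(next querier.Handler, logger *slog.Logger) querier.Handler {
	return &loggingHandler{next: next, logger: logger}
}

func (h *loggingHandler) QueryRange(rw http.ResponseWriter, req *http.Request) {
	h.logger.InfoContext(req.Context(), "query range requested")
	h.next.QueryRange(rw, req)
}

func (h *loggingHandler) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
	h.next.QueryRawStream(rw, req)
}

func (h *loggingHandler) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
	h.next.ReplaceVariables(rw, req)
}
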
@@ -1,454 +1 @@
package querier

import (
"net/http"

"github.com/SigNoz/signoz/pkg/http/handler"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
)

// QueryRangeV5OpenAPIDef is the OpenAPI definition for the /api/v5/query_range endpoint.
var QueryRangeV5OpenAPIDef = handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"query"},
Summary: "Query range",
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
RequestExamples: queryRangeV5Examples,
Response: new(qbtypes.QueryRangeResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: []handler.OpenAPISecurityScheme{
{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
},
}

var queryRangeV5Examples = []handler.OpenAPIExample{
{
Name: "traces_time_series",
Summary: "Time series: count spans grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "span_count"},
},
"stepInterval": "60s",
"filter": map[string]any{"expression": "service.name = 'frontend'"},
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
"order": []any{map[string]any{"key": map[string]any{"name": "span_count"}, "direction": "desc"}},
"limit": 10,
},
},
},
},
},
},
{
Name: "logs_time_series",
Summary: "Time series: count error logs grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "log_count"},
},
"stepInterval": "60s",
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
"order": []any{map[string]any{"key": map[string]any{"name": "log_count"}, "direction": "desc"}},
"limit": 10,
},
},
},
},
},
},
{
Name: "metrics_gauge_time_series",
Summary: "Time series: latest gauge value averaged across series",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "system.cpu.utilization", "timeAggregation": "latest", "spaceAggregation": "avg"},
},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "host.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "metrics_rate_time_series",
Summary: "Time series: rate of cumulative counter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum"},
},
"stepInterval": 120,
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "metrics_histogram_time_series",
Summary: "Time series: p99 latency from histogram",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.bucket", "spaceAggregation": "p99"},
},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "logs_raw",
Summary: "Raw: fetch raw log records",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
"selectFields": []any{
map[string]any{"name": "body", "fieldContext": "log"},
map[string]any{"name": "service.name", "fieldContext": "resource"},
},
"order": []any{
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "log"}, "direction": "desc"},
map[string]any{"key": map[string]any{"name": "id"}, "direction": "desc"},
},
"limit": 50,
"offset": 0,
},
},
},
},
},
},
{
Name: "traces_raw",
Summary: "Raw: fetch raw span records",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"filter": map[string]any{"expression": "service.name = 'frontend' AND has_error = true"},
"selectFields": []any{
map[string]any{"name": "name", "fieldContext": "span"},
map[string]any{"name": "duration_nano", "fieldContext": "span"},
},
"order": []any{
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "span"}, "direction": "desc"},
},
"limit": 100,
},
},
},
},
},
},
{
Name: "traces_scalar",
Summary: "Scalar: total span count",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "span_count"},
},
"filter": map[string]any{"expression": "service.name = 'frontend'"},
},
},
},
},
},
},
{
Name: "logs_scalar",
Summary: "Scalar: total error log count",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "error_count"},
},
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
},
},
},
},
},
},
{
Name: "metrics_scalar",
Summary: "Scalar: single reduced metric value",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum", "reduceTo": "sum"},
},
"stepInterval": "60s",
},
},
},
},
},
},
{
Name: "formula",
Summary: "Formula: error rate from two trace queries",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{map[string]any{"expression": "countIf(has_error = true)"}},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "B",
"signal": "traces",
"aggregations": []any{map[string]any{"expression": "count()"}},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
map[string]any{
"type": "builder_formula",
"spec": map[string]any{
"name": "error_rate",
"expression": "A / B * 100",
},
},
},
},
},
},
{
Name: "promql",
Summary: "PromQL: request rate with UTF-8 metric name",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "promql",
"spec": map[string]any{
"name": "request_rate",
"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
"step": 60,
},
},
},
},
},
},
{
Name: "clickhouse_sql_traces_time_series",
Summary: "ClickHouse SQL: traces time series with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "span_rate",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value" +
" FROM signoz_traces.distributed_signoz_index_v3" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
" GROUP BY ts ORDER BY ts",
},
},
},
},
},
},
{
Name: "clickhouse_sql_logs_raw",
Summary: "ClickHouse SQL: raw logs with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "recent_errors",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT timestamp, body" +
" FROM signoz_logs.distributed_logs_v2" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
" AND severity_text = 'ERROR'" +
" ORDER BY timestamp DESC LIMIT 100",
},
},
},
},
},
},
{
Name: "clickhouse_sql_traces_scalar",
Summary: "ClickHouse SQL: scalar aggregate with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "total_spans",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT count() AS value" +
" FROM signoz_traces.distributed_signoz_index_v3" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp",
},
},
},
},
},
},
}

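Note: the examples above double as ready-made request bodies for the endpoint registered under /api/v5/query_range. A rough client sketch (the base URL, port, API key value, and the SIGNOZ-API-KEY header name are assumptions about a particular deployment, not part of this change), posting the traces_time_series example:

package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Hypothetical deployment URL; replace with your own.
	url := "http://localhost:8080/api/v5/query_range"
	body := []byte(`{
		"schemaVersion": "v1",
		"start": 1640995200000,
		"end": 1640998800000,
		"requestType": "time_series",
		"compositeQuery": {"queries": [{"type": "builder_query", "spec": {
			"name": "A", "signal": "traces",
			"aggregations": [{"expression": "count()", "alias": "span_count"}],
			"stepInterval": "60s",
			"filter": {"expression": "service.name = 'frontend'"},
			"groupBy": [{"name": "service.name", "fieldContext": "resource"}],
			"limit": 10}}]}
	}`)

	req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	// Assumed header name for API-key auth; check your deployment's docs.
	req.Header.Set("SIGNOZ-API-KEY", "<api-key>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	out, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(out))
}
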
@@ -1913,7 +1913,7 @@ func (r *ClickHouseReader) GetCustomRetentionTTL(ctx context.Context, orgID stri
// No V2 configuration found, return defaults
response.DefaultTTLDays = 15
response.TTLConditions = []model.CustomRetentionRule{}
response.Status = constants.StatusFailed
response.Status = constants.StatusSuccess
response.ColdStorageTTLDays = -1
return response, nil
}

@@ -79,8 +79,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/version"

querierAPI "github.com/SigNoz/signoz/pkg/querier"
)

type status string
@@ -145,8 +143,6 @@ type APIHandler struct {

LicensingAPI licensing.API

QuerierAPI *querierAPI.API

QueryParserAPI *queryparser.API

Signoz *signoz.SigNoz
@@ -175,8 +171,6 @@ type APIHandlerOpts struct {

LicensingAPI licensing.API

QuerierAPI *querierAPI.API

QueryParserAPI *queryparser.API

Signoz *signoz.SigNoz
@@ -239,7 +233,6 @@ func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, erro
AlertmanagerAPI: opts.AlertmanagerAPI,
LicensingAPI: opts.LicensingAPI,
Signoz: opts.Signoz,
QuerierAPI: opts.QuerierAPI,
QueryParserAPI: opts.QueryParserAPI,
}

@@ -396,7 +389,7 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *middlew
subRouter.HandleFunc("/query_progress", am.ViewAccess(aH.GetQueryProgressUpdates)).Methods(http.MethodGet)

// live logs
subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.QuerierAPI.QueryRawStream)).Methods(http.MethodGet)
subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.Signoz.Handlers.QuerierHandler.QueryRawStream)).Methods(http.MethodGet)
}

func (aH *APIHandler) RegisterInfraMetricsRoutes(router *mux.Router, am *middleware.AuthZ) {
@@ -470,12 +463,6 @@ func (aH *APIHandler) RegisterQueryRangeV4Routes(router *mux.Router, am *middlew
subRouter.HandleFunc("/metric/metric_metadata", am.ViewAccess(aH.getMetricMetadata)).Methods(http.MethodGet)
}

func (aH *APIHandler) RegisterQueryRangeV5Routes(router *mux.Router, am *middleware.AuthZ) {
subRouter := router.PathPrefix("/api/v5").Subrouter()
subRouter.HandleFunc("/query_range", am.ViewAccess(aH.QuerierAPI.QueryRange)).Methods(http.MethodPost)
subRouter.HandleFunc("/substitute_vars", am.ViewAccess(aH.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
}

// todo(remove): Implemented at render package (github.com/SigNoz/signoz/pkg/http/render) with the new error structure
func (aH *APIHandler) Respond(w http.ResponseWriter, data interface{}) {
writeHttpResponse(w, data)

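Note: the removed RegisterQueryRangeV5Routes above shows the route shapes involved. As an illustrative sketch only (where this wiring now lives, and the omission of the authz middleware, are assumptions rather than facts from this diff), mounting any querier.Handler on those paths with gorilla/mux would look roughly like:

package routesexample

import (
	"net/http"

	"github.com/gorilla/mux"

	"github.com/SigNoz/signoz/pkg/querier"
)

// registerQueryRangeV5 mounts a querier.Handler on the /api/v5 paths shown
// in the removed RegisterQueryRangeV5Routes; auth middleware is omitted.
func registerQueryRangeV5(router *mux.Router, h querier.Handler) {
	subRouter := router.PathPrefix("/api/v5").Subrouter()
	subRouter.HandleFunc("/query_range", h.QueryRange).Methods(http.MethodPost)
	subRouter.HandleFunc("/substitute_vars", h.ReplaceVariables).Methods(http.MethodPost)
}
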
@@ -23,7 +23,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/prometheus"
"github.com/SigNoz/signoz/pkg/querier"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
@@ -133,7 +132,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: nooplicensing.NewLicenseAPI(),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
}, config)
if err != nil {
@@ -217,7 +215,6 @@ func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server,
api.RegisterInfraMetricsRoutes(r, am)
api.RegisterWebSocketPaths(r, am)
api.RegisterQueryRangeV4Routes(r, am)
api.RegisterQueryRangeV5Routes(r, am)
api.RegisterMessagingQueuesRoutes(r, am)
api.RegisterThirdPartyApiRoutes(r, am)
api.MetricExplorerRoutes(r, am)

@@ -1,6 +1,7 @@
package signoz

import (
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/signozauthzapi"
"github.com/SigNoz/signoz/pkg/factory"
@@ -31,6 +32,7 @@ import (
"github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/zeus"
)

type Handlers struct {
@@ -48,18 +50,22 @@ type Handlers struct {
GatewayHandler gateway.Handler
Fields fields.Handler
AuthzHandler authz.Handler
ZeusHandler zeus.Handler
QuerierHandler querier.Handler
}

func NewHandlers(
modules Modules,
providerSettings factory.ProviderSettings,
querier querier.Querier,
analytics analytics.Analytics,
querierHandler querier.Handler,
licensing licensing.Licensing,
global global.Global,
flaggerService flagger.Flagger,
gatewayService gateway.Gateway,
telemetryMetadataStore telemetrytypes.MetadataStore,
authz authz.AuthZ,
zeusService zeus.Zeus,
) Handlers {
return Handlers{
SavedView: implsavedview.NewHandler(modules.SavedView),
@@ -76,5 +82,7 @@ func NewHandlers(
GatewayHandler: gateway.NewHandler(gatewayService),
Fields: implfields.NewHandler(providerSettings, telemetryMetadataStore),
AuthzHandler: signozauthzapi.NewHandler(authz),
ZeusHandler: zeus.NewHandler(zeusService, licensing),
QuerierHandler: querierHandler,
}
}

Some files were not shown because too many files have changed in this diff.