mirror of
https://github.com/SigNoz/signoz.git
synced 2026-05-07 19:10:30 +01:00
Compare commits
1 Commits
nv/patch-d
...
nv/v2-list
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c13270814a |
@@ -261,6 +261,18 @@ func (module *module) UpdatePublicV2(ctx context.Context, orgID valuer.UUID, id
|
||||
return existing, nil
|
||||
}
|
||||
|
||||
func (module *module) ListV2(ctx context.Context, orgID valuer.UUID, userID valuer.UUID, params *dashboardtypes.ListDashboardsV2Params) (*dashboardtypes.ListableDashboardV2, error) {
|
||||
return module.pkgDashboardModule.ListV2(ctx, orgID, userID, params)
|
||||
}
|
||||
|
||||
func (module *module) PinV2(ctx context.Context, orgID valuer.UUID, userID valuer.UUID, id valuer.UUID) error {
|
||||
return module.pkgDashboardModule.PinV2(ctx, orgID, userID, id)
|
||||
}
|
||||
|
||||
func (module *module) UnpinV2(ctx context.Context, userID valuer.UUID, id valuer.UUID) error {
|
||||
return module.pkgDashboardModule.UnpinV2(ctx, userID, id)
|
||||
}
|
||||
|
||||
func (module *module) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*dashboardtypes.Dashboard, error) {
|
||||
return module.pkgDashboardModule.Get(ctx, orgID, id)
|
||||
}
|
||||
|
||||
65
ee/sqlstore/postgressqlstore/listfilter_test.go
Normal file
65
ee/sqlstore/postgressqlstore/listfilter_test.go
Normal file
@@ -0,0 +1,65 @@
|
||||
package postgressqlstore
|
||||
|
||||
// Lives in this package (rather than the listfilter package) so it can use
|
||||
// the unexported newFormatter constructor without driving a real Postgres
|
||||
// connection. Covers the only listfilter cases whose emitted SQL differs
|
||||
// between SQLite and Postgres — the ones that go through JSONExtractString
|
||||
// (`name`, `description`). All other operators (=, !=, BETWEEN, LIKE, IN,
|
||||
// EXISTS, lower(...)) emit identical ANSI SQL on both dialects and are
|
||||
// covered by the SQLite tests in the listfilter package itself.
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/uptrace/bun/dialect/pgdialect"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types/dashboardtypes/listfilter"
|
||||
)
|
||||
|
||||
func TestListFilterCompile_Postgres(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
|
||||
cases := []struct {
|
||||
name string
|
||||
query string
|
||||
wantSQL string
|
||||
wantArgs []any
|
||||
}{
|
||||
{
|
||||
name: "name = uses Postgres -> / ->> chain",
|
||||
query: `name = 'overview'`,
|
||||
wantSQL: `"d"."data"->'data'->'display'->>'name' = ?`,
|
||||
wantArgs: []any{"overview"},
|
||||
},
|
||||
{
|
||||
name: "name CONTAINS — same JSON path, LIKE pattern",
|
||||
query: `name CONTAINS 'overview'`,
|
||||
wantSQL: `"d"."data"->'data'->'display'->>'name' LIKE ?`,
|
||||
wantArgs: []any{"%overview%"},
|
||||
},
|
||||
{
|
||||
name: "name ILIKE — LOWER wraps the JSON path",
|
||||
query: `name ILIKE 'Prod%'`,
|
||||
wantSQL: `lower("d"."data"->'data'->'display'->>'name') LIKE LOWER(?)`,
|
||||
wantArgs: []any{"Prod%"},
|
||||
},
|
||||
{
|
||||
name: "description = follows the same path shape",
|
||||
query: `description = 'd1'`,
|
||||
wantSQL: `"d"."data"->'data'->'display'->>'description' = ?`,
|
||||
wantArgs: []any{"d1"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, c := range cases {
|
||||
t.Run(c.name, func(t *testing.T) {
|
||||
out, err := listfilter.Compile(c.query, f)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, out)
|
||||
assert.Equal(t, c.wantSQL, out.SQL)
|
||||
assert.Equal(t, c.wantArgs, out.Args)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -13,6 +13,23 @@ import (
|
||||
)
|
||||
|
||||
func (provider *provider) addDashboardRoutes(router *mux.Router) error {
|
||||
if err := router.Handle("/api/v2/dashboards", handler.New(provider.authZ.ViewAccess(provider.dashboardHandler.ListV2), handler.OpenAPIDef{
|
||||
ID: "ListDashboardsV2",
|
||||
Tags: []string{"dashboard"},
|
||||
Summary: "List dashboards (v2)",
|
||||
Description: "Returns a page of v2-shape dashboards for the calling user's org. Supports a filter DSL (`query`), sort (`updated_at`/`created_at`/`title`), order (`asc`/`desc`), and offset-based pagination (`limit`/`offset`). Pinned dashboards float to the top of each page.",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: new(dashboardtypes.ListableDashboardV2),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
|
||||
})).Methods(http.MethodGet).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v2/dashboards", handler.New(provider.authZ.EditAccess(provider.dashboardHandler.CreateV2), handler.OpenAPIDef{
|
||||
ID: "CreateDashboardV2",
|
||||
Tags: []string{"dashboard"},
|
||||
@@ -121,6 +138,42 @@ func (provider *provider) addDashboardRoutes(router *mux.Router) error {
|
||||
return err
|
||||
}
|
||||
|
||||
// ViewAccess: pinning only mutates the calling user's pin list, not the
|
||||
// dashboard itself — anyone who can view a dashboard can bookmark it.
|
||||
if err := router.Handle("/api/v2/dashboards/{id}/pins/me", handler.New(provider.authZ.ViewAccess(provider.dashboardHandler.PinV2), handler.OpenAPIDef{
|
||||
ID: "PinDashboardV2",
|
||||
Tags: []string{"dashboard"},
|
||||
Summary: "Pin a dashboard for the current user (v2)",
|
||||
Description: "Pins the dashboard for the calling user. A user can pin at most 10 dashboards; pinning when at the limit returns 409. Re-pinning an already-pinned dashboard is a no-op success.",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: nil,
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
|
||||
})).Methods(http.MethodPut).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v2/dashboards/{id}/pins/me", handler.New(provider.authZ.ViewAccess(provider.dashboardHandler.UnpinV2), handler.OpenAPIDef{
|
||||
ID: "UnpinDashboardV2",
|
||||
Tags: []string{"dashboard"},
|
||||
Summary: "Unpin a dashboard for the current user (v2)",
|
||||
Description: "Removes the pin for the calling user. Idempotent — unpinning a dashboard that wasn't pinned still returns 204.",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: nil,
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
|
||||
})).Methods(http.MethodDelete).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v2/dashboards/{id}/public", handler.New(provider.authZ.AdminAccess(provider.dashboardHandler.CreatePublicV2), handler.OpenAPIDef{
|
||||
ID: "CreatePublicDashboardV2",
|
||||
Tags: []string{"dashboard"},
|
||||
|
||||
@@ -61,6 +61,8 @@ type Module interface {
|
||||
|
||||
GetV2(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*dashboardtypes.DashboardV2, error)
|
||||
|
||||
ListV2(ctx context.Context, orgID valuer.UUID, userID valuer.UUID, params *dashboardtypes.ListDashboardsV2Params) (*dashboardtypes.ListableDashboardV2, error)
|
||||
|
||||
UpdateV2(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatedBy string, updateable dashboardtypes.UpdateableDashboardV2) (*dashboardtypes.DashboardV2, error)
|
||||
|
||||
PatchV2(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatedBy string, patch dashboardtypes.PatchableDashboardV2) (*dashboardtypes.DashboardV2, error)
|
||||
@@ -70,6 +72,10 @@ type Module interface {
|
||||
CreatePublicV2(ctx context.Context, orgID valuer.UUID, id valuer.UUID, postable dashboardtypes.PostablePublicDashboard) (*dashboardtypes.DashboardV2, error)
|
||||
|
||||
UpdatePublicV2(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatable dashboardtypes.UpdatablePublicDashboard) (*dashboardtypes.DashboardV2, error)
|
||||
|
||||
PinV2(ctx context.Context, orgID valuer.UUID, userID valuer.UUID, id valuer.UUID) error
|
||||
|
||||
UnpinV2(ctx context.Context, userID valuer.UUID, id valuer.UUID) error
|
||||
}
|
||||
|
||||
type Handler interface {
|
||||
@@ -100,6 +106,8 @@ type Handler interface {
|
||||
|
||||
GetV2(http.ResponseWriter, *http.Request)
|
||||
|
||||
ListV2(http.ResponseWriter, *http.Request)
|
||||
|
||||
UpdateV2(http.ResponseWriter, *http.Request)
|
||||
|
||||
PatchV2(http.ResponseWriter, *http.Request)
|
||||
@@ -111,4 +119,8 @@ type Handler interface {
|
||||
CreatePublicV2(http.ResponseWriter, *http.Request)
|
||||
|
||||
UpdatePublicV2(http.ResponseWriter, *http.Request)
|
||||
|
||||
PinV2(http.ResponseWriter, *http.Request)
|
||||
|
||||
UnpinV2(http.ResponseWriter, *http.Request)
|
||||
}
|
||||
|
||||
@@ -2,12 +2,14 @@ package impldashboard
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/dashboardtypes/listfilter"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
@@ -124,6 +126,127 @@ func (store *store) GetV2(ctx context.Context, orgID valuer.UUID, id valuer.UUID
|
||||
return storable, public, nil
|
||||
}
|
||||
|
||||
// ListV2 emits the joined dashboard ⨝ pinned_dashboard ⨝ public_dashboard
|
||||
// query the spec calls for. Aliases:
|
||||
//
|
||||
// dashboard AS d — the visitor expects this
|
||||
// pinned_dashboard AS pin — only used inside this query
|
||||
// public_dashboard AS pd — the visitor expects this
|
||||
//
|
||||
// Sort is "is_pinned DESC, <sort> <order>" so pinned dashboards float to the
|
||||
// top inside the requested ordering. Title-sort goes through the same
|
||||
// JSONExtractString path the visitor uses for name/description filtering.
|
||||
func (store *store) ListV2(
|
||||
ctx context.Context,
|
||||
orgID valuer.UUID,
|
||||
userID valuer.UUID,
|
||||
params *dashboardtypes.ListDashboardsV2Params,
|
||||
) ([]*dashboardtypes.DashboardListRow, bool, error) {
|
||||
compiled, err := listfilter.Compile(params.Query, store.sqlstore.Formatter())
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
}
|
||||
type listedRow struct {
|
||||
bun.BaseModel `bun:"table:dashboard,alias:d"`
|
||||
|
||||
ID valuer.UUID `bun:"id"`
|
||||
OrgID valuer.UUID `bun:"org_id"`
|
||||
Data dashboardtypes.StorableDashboardData `bun:"data"`
|
||||
Locked bool `bun:"locked"`
|
||||
CreatedAt time.Time `bun:"created_at"`
|
||||
CreatedBy string `bun:"created_by"`
|
||||
UpdatedAt time.Time `bun:"updated_at"`
|
||||
UpdatedBy string `bun:"updated_by"`
|
||||
|
||||
IsPinned bool `bun:"is_pinned"`
|
||||
|
||||
PublicID *valuer.UUID `bun:"public_id"`
|
||||
PublicCreatedAt *time.Time `bun:"public_created_at"`
|
||||
PublicUpdatedAt *time.Time `bun:"public_updated_at"`
|
||||
PublicTimeRangeEnabled *bool `bun:"public_time_range_enabled"`
|
||||
PublicDefaultTimeRange *string `bun:"public_default_time_range"`
|
||||
}
|
||||
|
||||
rows := make([]*listedRow, 0)
|
||||
|
||||
q := store.sqlstore.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(&rows).
|
||||
ColumnExpr("d.id, d.org_id, d.data, d.locked, d.created_at, d.created_by, d.updated_at, d.updated_by").
|
||||
ColumnExpr("CASE WHEN pin.user_id IS NOT NULL THEN 1 ELSE 0 END AS is_pinned").
|
||||
ColumnExpr("pd.id AS public_id, pd.created_at AS public_created_at, pd.updated_at AS public_updated_at, pd.time_range_enabled AS public_time_range_enabled, pd.default_time_range AS public_default_time_range").
|
||||
Join("LEFT JOIN pinned_dashboard AS pin ON pin.user_id = ? AND pin.dashboard_id = d.id", userID).
|
||||
Join("LEFT JOIN public_dashboard AS pd ON pd.dashboard_id = d.id").
|
||||
Where("d.org_id = ?", orgID).
|
||||
Where("d.deleted_at IS NULL")
|
||||
|
||||
if compiled != nil {
|
||||
q = q.Where(compiled.SQL, compiled.Args...)
|
||||
}
|
||||
|
||||
sortExpr, err := store.sortExprForListV2(params.Sort)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
}
|
||||
q = q.
|
||||
OrderExpr("is_pinned DESC").
|
||||
OrderExpr(sortExpr + " " + strings.ToUpper(string(params.Order))).
|
||||
Limit(params.Limit + 1).
|
||||
Offset(params.Offset)
|
||||
|
||||
if err := q.Scan(ctx); err != nil {
|
||||
return nil, false, err
|
||||
}
|
||||
|
||||
hasMore := len(rows) > params.Limit
|
||||
if hasMore {
|
||||
rows = rows[:params.Limit]
|
||||
}
|
||||
|
||||
out := make([]*dashboardtypes.DashboardListRow, len(rows))
|
||||
for i, r := range rows {
|
||||
row := &dashboardtypes.DashboardListRow{
|
||||
Dashboard: &dashboardtypes.StorableDashboard{
|
||||
Identifiable: types.Identifiable{ID: r.ID},
|
||||
TimeAuditable: types.TimeAuditable{CreatedAt: r.CreatedAt, UpdatedAt: r.UpdatedAt},
|
||||
UserAuditable: types.UserAuditable{CreatedBy: r.CreatedBy, UpdatedBy: r.UpdatedBy},
|
||||
OrgID: r.OrgID,
|
||||
Data: r.Data,
|
||||
Locked: r.Locked,
|
||||
},
|
||||
Pinned: r.IsPinned,
|
||||
}
|
||||
if r.PublicID != nil {
|
||||
row.Public = &dashboardtypes.StorablePublicDashboard{
|
||||
Identifiable: types.Identifiable{ID: *r.PublicID},
|
||||
TimeAuditable: types.TimeAuditable{CreatedAt: *r.PublicCreatedAt, UpdatedAt: *r.PublicUpdatedAt},
|
||||
TimeRangeEnabled: *r.PublicTimeRangeEnabled,
|
||||
DefaultTimeRange: *r.PublicDefaultTimeRange,
|
||||
DashboardID: r.ID.StringValue(),
|
||||
}
|
||||
}
|
||||
out[i] = row
|
||||
}
|
||||
return out, hasMore, nil
|
||||
}
|
||||
|
||||
// sortExprForListV2 maps a sort enum to the SQL expression to plug into
|
||||
// ORDER BY. Title-sort routes through the SQLFormatter so it stays
|
||||
// dialect-aware (matches what listfilter/visitor does for the name filter).
|
||||
func (store *store) sortExprForListV2(sort dashboardtypes.ListSort) (string, error) {
|
||||
switch sort {
|
||||
case dashboardtypes.ListSortUpdatedAt:
|
||||
return "d.updated_at", nil
|
||||
case dashboardtypes.ListSortCreatedAt:
|
||||
return "d.created_at", nil
|
||||
case dashboardtypes.ListSortName:
|
||||
return string(store.sqlstore.Formatter().JSONExtractString("d.data", "$.data.display.name")), nil
|
||||
}
|
||||
return "", errors.Newf(errors.TypeInvalidInput, dashboardtypes.ErrCodeDashboardListInvalid,
|
||||
"unsupported sort field %q", sort)
|
||||
}
|
||||
|
||||
func (store *store) UpdateV2(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatedBy string, data dashboardtypes.StorableDashboardData) error {
|
||||
res, err := store.
|
||||
sqlstore.
|
||||
@@ -331,3 +454,51 @@ func (store *store) RunInTx(ctx context.Context, cb func(ctx context.Context) er
|
||||
return cb(ctx)
|
||||
})
|
||||
}
|
||||
|
||||
// PinForUser combines the count check, the existence check, and the upsert in
|
||||
// a single statement so the limit gate and the insert can't drift between two
|
||||
// round-trips.
|
||||
//
|
||||
// pin exists? | count < 10? | WHERE passes? | effect | rows
|
||||
// ------------|-------------|-------------------------|-----------------------------------|-----
|
||||
// no | yes | yes (count branch) | INSERT new row | 1
|
||||
// no | no | no | nothing (limit hit) | 0
|
||||
// yes | yes | yes (count branch) | INSERT → conflict → no-op UPDATE | 1
|
||||
// yes | no | yes (EXISTS OR branch) | INSERT → conflict → no-op UPDATE | 1
|
||||
//
|
||||
// rows = 0 is the only signal of a real limit hit.
|
||||
func (store *store) PinForUser(ctx context.Context, pd *dashboardtypes.PinnedDashboard) error {
|
||||
res, err := store.sqlstore.BunDBCtx(ctx).NewRaw(`
|
||||
INSERT INTO pinned_dashboard (user_id, dashboard_id, org_id, pinned_at)
|
||||
SELECT ?, ?, ?, ?
|
||||
WHERE (SELECT COUNT(*) FROM pinned_dashboard WHERE user_id = ?) < ?
|
||||
OR EXISTS (SELECT 1 FROM pinned_dashboard WHERE user_id = ? AND dashboard_id = ?)
|
||||
ON CONFLICT (user_id, dashboard_id) DO UPDATE SET user_id = EXCLUDED.user_id
|
||||
`,
|
||||
pd.UserID, pd.DashboardID, pd.OrgID, pd.PinnedAt,
|
||||
pd.UserID, dashboardtypes.MaxPinnedDashboardsPerUser,
|
||||
pd.UserID, pd.DashboardID,
|
||||
).Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rows, err := res.RowsAffected()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if rows == 0 {
|
||||
return errors.Newf(errors.TypeAlreadyExists, dashboardtypes.ErrCodePinnedDashboardLimitHit,
|
||||
"cannot pin more than %d dashboards", dashboardtypes.MaxPinnedDashboardsPerUser)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (store *store) UnpinForUser(ctx context.Context, userID valuer.UUID, dashboardID valuer.UUID) error {
|
||||
_, err := store.sqlstore.BunDBCtx(ctx).
|
||||
NewDelete().
|
||||
Model((*dashboardtypes.PinnedDashboard)(nil)).
|
||||
Where("user_id = ?", userID).
|
||||
Where("dashboard_id = ?", dashboardID).
|
||||
Exec(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/binding"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
|
||||
@@ -45,6 +46,47 @@ func (handler *handler) CreateV2(rw http.ResponseWriter, r *http.Request) {
|
||||
render.Success(rw, http.StatusCreated, dashboardtypes.NewGettableDashboardV2FromDashboardV2(dashboard))
|
||||
}
|
||||
|
||||
func (handler *handler) ListV2(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
userID, err := valuer.NewUUID(claims.IdentityID())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
params := new(dashboardtypes.ListDashboardsV2Params)
|
||||
if err := binding.Query.BindQuery(r.URL.Query(), params); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
if err := params.Validate(); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
out, err := handler.module.ListV2(ctx, orgID, userID, params)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, out)
|
||||
}
|
||||
|
||||
func (handler *handler) GetV2(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
|
||||
defer cancel()
|
||||
@@ -266,6 +308,60 @@ func (handler *handler) CreatePublicV2(rw http.ResponseWriter, r *http.Request)
|
||||
render.Success(rw, http.StatusOK, dashboardtypes.NewGettableDashboardV2FromDashboardV2(dashboard))
|
||||
}
|
||||
|
||||
func (handler *handler) PinV2(rw http.ResponseWriter, r *http.Request) {
|
||||
handler.pinUnpinV2(rw, r, true)
|
||||
}
|
||||
|
||||
func (handler *handler) UnpinV2(rw http.ResponseWriter, r *http.Request) {
|
||||
handler.pinUnpinV2(rw, r, false)
|
||||
}
|
||||
|
||||
func (handler *handler) pinUnpinV2(rw http.ResponseWriter, r *http.Request, pin bool) {
|
||||
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
userID, err := valuer.NewUUID(claims.IdentityID())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
id := mux.Vars(r)["id"]
|
||||
if id == "" {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is missing in the path"))
|
||||
return
|
||||
}
|
||||
dashboardID, err := valuer.NewUUID(id)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
if pin {
|
||||
err = handler.module.PinV2(ctx, orgID, userID, dashboardID)
|
||||
} else {
|
||||
err = handler.module.UnpinV2(ctx, userID, dashboardID)
|
||||
}
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func (handler *handler) UpdatePublicV2(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
@@ -2,6 +2,7 @@ package impldashboard
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
|
||||
@@ -47,6 +48,27 @@ func (module *module) CreateV2(ctx context.Context, orgID valuer.UUID, createdBy
|
||||
return dashboard, nil
|
||||
}
|
||||
|
||||
// ListV2 calls the store for the joined page (the store owns DSL compilation
|
||||
// and limit+1/hasMore detection), batch-fetches tags for the returned
|
||||
// dashboard ids, and hands off to the type-side constructor for assembly.
|
||||
func (module *module) ListV2(ctx context.Context, orgID valuer.UUID, userID valuer.UUID, params *dashboardtypes.ListDashboardsV2Params) (*dashboardtypes.ListableDashboardV2, error) {
|
||||
rows, hasMore, err := module.store.ListV2(ctx, orgID, userID, params)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
dashboardIDs := make([]valuer.UUID, len(rows))
|
||||
for i, r := range rows {
|
||||
dashboardIDs[i] = r.Dashboard.ID
|
||||
}
|
||||
tagsByEntity, err := module.tagModule.ListForEntities(ctx, dashboardIDs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return dashboardtypes.NewListableDashboardV2(rows, tagsByEntity, hasMore)
|
||||
}
|
||||
|
||||
func (module *module) GetV2(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*dashboardtypes.DashboardV2, error) {
|
||||
storable, public, err := module.store.GetV2(ctx, orgID, id)
|
||||
if err != nil {
|
||||
@@ -176,3 +198,19 @@ func (module *module) LockUnlockV2(ctx context.Context, orgID valuer.UUID, id va
|
||||
}
|
||||
return module.store.LockUnlockV2(ctx, orgID, id, lock, updatedBy)
|
||||
}
|
||||
|
||||
func (module *module) PinV2(ctx context.Context, orgID valuer.UUID, userID valuer.UUID, id valuer.UUID) error {
|
||||
if _, err := module.GetV2(ctx, orgID, id); err != nil {
|
||||
return err
|
||||
}
|
||||
return module.store.PinForUser(ctx, &dashboardtypes.PinnedDashboard{
|
||||
UserID: userID,
|
||||
DashboardID: id,
|
||||
OrgID: orgID,
|
||||
PinnedAt: time.Now(),
|
||||
})
|
||||
}
|
||||
|
||||
func (module *module) UnpinV2(ctx context.Context, userID valuer.UUID, id valuer.UUID) error {
|
||||
return module.store.UnpinForUser(ctx, userID, id)
|
||||
}
|
||||
|
||||
33
pkg/parser/filterquery/parse.go
Normal file
33
pkg/parser/filterquery/parse.go
Normal file
@@ -0,0 +1,33 @@
|
||||
package filterquery
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
grammar "github.com/SigNoz/signoz/pkg/parser/filterquery/grammar"
|
||||
"github.com/antlr4-go/antlr/v4"
|
||||
)
|
||||
|
||||
func Parse(query string) (antlr.ParseTree, *antlr.CommonTokenStream, *ErrorCollector) {
|
||||
collector := NewErrorCollector()
|
||||
lexer := grammar.NewFilterQueryLexer(antlr.NewInputStream(query))
|
||||
lexer.RemoveErrorListeners()
|
||||
lexer.AddErrorListener(collector)
|
||||
tokens := antlr.NewCommonTokenStream(lexer, 0)
|
||||
parser := grammar.NewFilterQueryParser(tokens)
|
||||
parser.RemoveErrorListeners()
|
||||
parser.AddErrorListener(collector)
|
||||
return parser.Query(), tokens, collector
|
||||
}
|
||||
|
||||
type ErrorCollector struct {
|
||||
*antlr.DefaultErrorListener
|
||||
Errors []string
|
||||
}
|
||||
|
||||
func NewErrorCollector() *ErrorCollector {
|
||||
return &ErrorCollector{}
|
||||
}
|
||||
|
||||
func (c *ErrorCollector) SyntaxError(_ antlr.Recognizer, _ any, line, column int, msg string, _ antlr.RecognitionException) {
|
||||
c.Errors = append(c.Errors, fmt.Sprintf("syntax error at %d:%d — %s", line, column, msg))
|
||||
}
|
||||
@@ -197,6 +197,7 @@ func NewSQLMigrationProviderFactories(
|
||||
sqlmigration.NewMigrateAWSAllRegionsFactory(sqlstore),
|
||||
sqlmigration.NewAddTagsFactory(sqlstore, sqlschema),
|
||||
sqlmigration.NewAddDashboardSoftDeleteFactory(sqlstore, sqlschema),
|
||||
sqlmigration.NewAddPinnedDashboardFactory(sqlstore, sqlschema),
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
71
pkg/sqlmigration/080_add_pinned_dashboard.go
Normal file
71
pkg/sqlmigration/080_add_pinned_dashboard.go
Normal file
@@ -0,0 +1,71 @@
|
||||
package sqlmigration
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/sqlschema"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/uptrace/bun"
|
||||
"github.com/uptrace/bun/migrate"
|
||||
)
|
||||
|
||||
type addPinnedDashboard struct {
|
||||
sqlstore sqlstore.SQLStore
|
||||
sqlschema sqlschema.SQLSchema
|
||||
}
|
||||
|
||||
func NewAddPinnedDashboardFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("add_pinned_dashboard"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
|
||||
return &addPinnedDashboard{
|
||||
sqlstore: sqlstore,
|
||||
sqlschema: sqlschema,
|
||||
}, nil
|
||||
})
|
||||
}
|
||||
|
||||
func (migration *addPinnedDashboard) Register(migrations *migrate.Migrations) error {
|
||||
return migrations.Register(migration.Up, migration.Down)
|
||||
}
|
||||
|
||||
func (migration *addPinnedDashboard) Up(ctx context.Context, db *bun.DB) error {
|
||||
tx, err := db.BeginTx(ctx, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() { _ = tx.Rollback() }()
|
||||
|
||||
// Composite PK on (user_id, dashboard_id) prevents accidental double-pins
|
||||
// for the same user/dashboard pair. Only org_id carries an FK — user_id and
|
||||
// dashboard_id mirror tag_relations and skip FKs because cascade deletes
|
||||
// are disabled at the platform level (see tags spec).
|
||||
sqls := migration.sqlschema.Operator().CreateTable(&sqlschema.Table{
|
||||
Name: "pinned_dashboard",
|
||||
Columns: []*sqlschema.Column{
|
||||
{Name: "user_id", DataType: sqlschema.DataTypeText, Nullable: false},
|
||||
{Name: "dashboard_id", DataType: sqlschema.DataTypeText, Nullable: false},
|
||||
{Name: "org_id", DataType: sqlschema.DataTypeText, Nullable: false},
|
||||
{Name: "pinned_at", DataType: sqlschema.DataTypeTimestamp, Nullable: false, Default: "current_timestamp"},
|
||||
},
|
||||
PrimaryKeyConstraint: &sqlschema.PrimaryKeyConstraint{ColumnNames: []sqlschema.ColumnName{"user_id", "dashboard_id"}},
|
||||
ForeignKeyConstraints: []*sqlschema.ForeignKeyConstraint{
|
||||
{
|
||||
ReferencingColumnName: sqlschema.ColumnName("org_id"),
|
||||
ReferencedTableName: sqlschema.TableName("organizations"),
|
||||
ReferencedColumnName: sqlschema.ColumnName("id"),
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
for _, sql := range sqls {
|
||||
if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
func (migration *addPinnedDashboard) Down(_ context.Context, _ *bun.DB) error {
|
||||
return nil
|
||||
}
|
||||
124
pkg/types/dashboardtypes/list_v2.go
Normal file
124
pkg/types/dashboardtypes/list_v2.go
Normal file
@@ -0,0 +1,124 @@
|
||||
package dashboardtypes
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types/tagtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
const (
	// DefaultListLimit is the page size used when the caller omits limit.
	DefaultListLimit = 20
	// MaxListLimit is the ceiling a caller-supplied limit is clamped to.
	MaxListLimit = 200
)

// ListSort enumerates the sortable fields of the dashboard list endpoint.
// A closed enum keeps callers from ordering by arbitrary columns.
type ListSort string

const (
	ListSortUpdatedAt ListSort = "updated_at"
	ListSortCreatedAt ListSort = "created_at"
	ListSortName      ListSort = "name"
)

// ListOrder is the sort direction for the dashboard list endpoint.
type ListOrder string

const (
	ListOrderAsc  ListOrder = "asc"
	ListOrderDesc ListOrder = "desc"
)
|
||||
|
||||
var ErrCodeDashboardListInvalid = errors.MustNewCode("dashboard_list_invalid")
|
||||
|
||||
type ListDashboardsV2Params struct {
|
||||
Query string `query:"query"`
|
||||
Sort ListSort `query:"sort"`
|
||||
Order ListOrder `query:"order"`
|
||||
Limit int `query:"limit"`
|
||||
Offset int `query:"offset"`
|
||||
}
|
||||
|
||||
// Validate fills in defaults (sort=updated_at, order=desc, limit=20) and
|
||||
// rejects out-of-allowlist sort/order values and bad limit/offset. Limit is
|
||||
// clamped to MaxListLimit on the high side. Lowercases sort/order so callers
|
||||
// can pass them in any case.
|
||||
func (p *ListDashboardsV2Params) Validate() error {
|
||||
if p.Sort == "" {
|
||||
p.Sort = ListSortUpdatedAt
|
||||
} else {
|
||||
p.Sort = ListSort(strings.ToLower(string(p.Sort)))
|
||||
switch p.Sort {
|
||||
case ListSortUpdatedAt, ListSortCreatedAt, ListSortName:
|
||||
default:
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardListInvalid,
|
||||
"invalid sort %q — expected one of: updated_at, created_at, name", p.Sort)
|
||||
}
|
||||
}
|
||||
|
||||
if p.Order == "" {
|
||||
p.Order = ListOrderDesc
|
||||
} else {
|
||||
p.Order = ListOrder(strings.ToLower(string(p.Order)))
|
||||
switch p.Order {
|
||||
case ListOrderAsc, ListOrderDesc:
|
||||
default:
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardListInvalid,
|
||||
"invalid order %q — expected asc or desc", p.Order)
|
||||
}
|
||||
}
|
||||
|
||||
if p.Limit == 0 {
|
||||
p.Limit = DefaultListLimit
|
||||
} else if p.Limit < 0 {
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardListInvalid,
|
||||
"invalid limit %d — must be a positive integer", p.Limit)
|
||||
} else if p.Limit > MaxListLimit {
|
||||
p.Limit = MaxListLimit
|
||||
}
|
||||
|
||||
if p.Offset < 0 {
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardListInvalid,
|
||||
"invalid offset %d — must be a non-negative integer", p.Offset)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type gettableDashboardWithPin struct {
|
||||
GettableDashboardV2
|
||||
Pinned bool `json:"pinned"`
|
||||
}
|
||||
|
||||
type ListableDashboardV2 struct {
|
||||
Dashboards []*gettableDashboardWithPin `json:"dashboards"`
|
||||
HasMore bool `json:"hasMore"`
|
||||
}
|
||||
|
||||
// DashboardListRow is the per-row shape Store.ListV2 returns. Bundles the
|
||||
// joined dashboard / public_dashboard / pinned_dashboard data so the module
|
||||
// layer can attach tags and assemble the gettable view.
|
||||
type DashboardListRow struct {
|
||||
Dashboard *StorableDashboard
|
||||
Public *StorablePublicDashboard // nil if no public_dashboard row exists
|
||||
Pinned bool
|
||||
}
|
||||
|
||||
func NewListableDashboardV2(rows []*DashboardListRow, tagsByEntity map[valuer.UUID][]*tagtypes.Tag, hasMore bool) (*ListableDashboardV2, error) {
|
||||
dashboards := make([]*gettableDashboardWithPin, len(rows))
|
||||
for i, r := range rows {
|
||||
v2, err := NewDashboardV2FromStorable(r.Dashboard, r.Public, tagsByEntity[r.Dashboard.ID])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
dashboards[i] = &gettableDashboardWithPin{
|
||||
GettableDashboardV2: *NewGettableDashboardV2FromDashboardV2(v2),
|
||||
Pinned: r.Pinned,
|
||||
}
|
||||
}
|
||||
return &ListableDashboardV2{
|
||||
Dashboards: dashboards,
|
||||
HasMore: hasMore,
|
||||
}, nil
|
||||
}
|
||||
71
pkg/types/dashboardtypes/listfilter/constants.go
Normal file
71
pkg/types/dashboardtypes/listfilter/constants.go
Normal file
@@ -0,0 +1,71 @@
|
||||
package listfilter
|
||||
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
qbtypesv5 "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
)
|
||||
|
||||
// ErrCodeDashboardListFilterInvalid is the error code attached to every
// rejection the filter compiler produces (unknown key, disallowed operator,
// malformed value, grammar syntax error).
var ErrCodeDashboardListFilterInvalid = errors.MustNewCode("dashboard_list_filter_invalid")

// Key is one of the user-facing filter keys allowed in the dashboard list DSL.
type Key string

const (
	KeyName        Key = "name"
	KeyDescription Key = "description"
	KeyCreatedAt   Key = "created_at"
	KeyUpdatedAt   Key = "updated_at"
	KeyCreatedBy   Key = "created_by"
	KeyLocked      Key = "locked"
	KeyPublic      Key = "public"
	KeyTag         Key = "tag"
)
// allowedOps lists the operators each key accepts. Mirrors the spec's
// key/operator matrix. parseKey also uses membership in this map to decide
// whether a key exists at all.
var allowedOps = map[Key]map[qbtypesv5.FilterOperator]struct{}{
	KeyName:        stringSearchOps(),
	KeyDescription: stringSearchOps(),
	KeyCreatedAt:   numericRangeOps(),
	KeyUpdatedAt:   numericRangeOps(),
	KeyCreatedBy:   stringSearchOps(),
	KeyLocked:      opsSet(qbtypesv5.FilterOperatorEqual, qbtypesv5.FilterOperatorNotEqual),
	KeyPublic:      opsSet(qbtypesv5.FilterOperatorEqual, qbtypesv5.FilterOperatorNotEqual),
	// tag additionally allows EXISTS / NOT EXISTS on top of the string set.
	KeyTag: opsSet(
		qbtypesv5.FilterOperatorEqual, qbtypesv5.FilterOperatorNotEqual,
		qbtypesv5.FilterOperatorLike, qbtypesv5.FilterOperatorNotLike,
		qbtypesv5.FilterOperatorILike, qbtypesv5.FilterOperatorNotILike,
		qbtypesv5.FilterOperatorContains, qbtypesv5.FilterOperatorNotContains,
		qbtypesv5.FilterOperatorRegexp, qbtypesv5.FilterOperatorNotRegexp,
		qbtypesv5.FilterOperatorIn, qbtypesv5.FilterOperatorNotIn,
		qbtypesv5.FilterOperatorExists, qbtypesv5.FilterOperatorNotExists,
	),
}
|
||||
func stringSearchOps() map[qbtypesv5.FilterOperator]struct{} {
|
||||
return opsSet(
|
||||
qbtypesv5.FilterOperatorEqual, qbtypesv5.FilterOperatorNotEqual,
|
||||
qbtypesv5.FilterOperatorLike, qbtypesv5.FilterOperatorNotLike,
|
||||
qbtypesv5.FilterOperatorILike, qbtypesv5.FilterOperatorNotILike,
|
||||
qbtypesv5.FilterOperatorContains, qbtypesv5.FilterOperatorNotContains,
|
||||
qbtypesv5.FilterOperatorRegexp, qbtypesv5.FilterOperatorNotRegexp,
|
||||
qbtypesv5.FilterOperatorIn, qbtypesv5.FilterOperatorNotIn,
|
||||
)
|
||||
}
|
||||
|
||||
func numericRangeOps() map[qbtypesv5.FilterOperator]struct{} {
|
||||
return opsSet(
|
||||
qbtypesv5.FilterOperatorEqual, qbtypesv5.FilterOperatorNotEqual,
|
||||
qbtypesv5.FilterOperatorLessThan, qbtypesv5.FilterOperatorLessThanOrEq,
|
||||
qbtypesv5.FilterOperatorGreaterThan, qbtypesv5.FilterOperatorGreaterThanOrEq,
|
||||
qbtypesv5.FilterOperatorBetween, qbtypesv5.FilterOperatorNotBetween,
|
||||
)
|
||||
}
|
||||
|
||||
func opsSet(ops ...qbtypesv5.FilterOperator) map[qbtypesv5.FilterOperator]struct{} {
|
||||
m := make(map[qbtypesv5.FilterOperator]struct{}, len(ops))
|
||||
for _, op := range ops {
|
||||
m[op] = struct{}{}
|
||||
}
|
||||
return m
|
||||
}
|
||||
39
pkg/types/dashboardtypes/listfilter/listfilter.go
Normal file
39
pkg/types/dashboardtypes/listfilter/listfilter.go
Normal file
@@ -0,0 +1,39 @@
|
||||
package listfilter
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
)
|
||||
|
||||
// Compiled is the output of compiling a dashboard list filter DSL query:
// a SQL WHERE fragment using `?` placeholders plus its positional arguments.
type Compiled struct {
	SQL  string
	Args []any
}
|
||||
func Compile(query string, formatter sqlstore.SQLFormatter) (*Compiled, error) {
|
||||
if len(query) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
queryVisitor := newVisitor(formatter)
|
||||
frag, syntaxErrs := queryVisitor.compile(query)
|
||||
|
||||
if len(syntaxErrs) > 0 {
|
||||
return nil, errors.NewInvalidInputf(ErrCodeDashboardListFilterInvalid,
|
||||
"invalid filter query: %s", strings.Join(syntaxErrs, "; "))
|
||||
}
|
||||
if len(queryVisitor.errors) > 0 {
|
||||
return nil, errors.NewInvalidInputf(ErrCodeDashboardListFilterInvalid,
|
||||
"invalid filter query: %s", strings.Join(queryVisitor.errors, "; "))
|
||||
}
|
||||
if frag == nil || frag.sql == "" {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
return &Compiled{
|
||||
SQL: frag.sql,
|
||||
Args: frag.args,
|
||||
}, nil
|
||||
}
|
||||
440
pkg/types/dashboardtypes/listfilter/listfilter_test.go
Normal file
440
pkg/types/dashboardtypes/listfilter/listfilter_test.go
Normal file
@@ -0,0 +1,440 @@
|
||||
package listfilter
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/DATA-DOG/go-sqlmock"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstoretest"
|
||||
)
|
||||
|
||||
// compileCase is one table-driven Compile scenario. Exactly one outcome
// applies: expectedErrShouldContain (error path), nilExpected (query compiles
// to no fragment), or expectedSQL/expectedArgs (success path).
type compileCase struct {
	subtestName              string
	dslQueryToCompile        string
	nilExpected              bool   // expect Compile to return (nil, nil)
	expectedSQL              string // compared after normalizeSQL on both sides
	expectedArgs             []any
	expectedErrShouldContain string // case-insensitive substring of the error
}
|
||||
// runCompileCases runs each table entry as a subtest: compile the DSL with
// the sqlite test formatter, then check either the expected error substring,
// a nil result, or the normalized SQL plus positional args.
func runCompileCases(t *testing.T, cases []compileCase) {
	t.Helper()
	for _, c := range cases {
		t.Run(c.subtestName, func(t *testing.T) {
			out, err := Compile(c.dslQueryToCompile, formatter(t))

			if c.expectedErrShouldContain != "" {
				require.Error(t, err)
				assert.Contains(t, strings.ToLower(err.Error()), strings.ToLower(c.expectedErrShouldContain))
				return
			}

			require.NoError(t, err)
			if c.nilExpected {
				assert.Nil(t, out)
				return
			}
			require.NotNil(t, out)

			// SQL comparison is whitespace-insensitive so the tables can use
			// readable multi-line expected strings.
			if c.expectedSQL != "" {
				assert.Equal(t, normalizeSQL(c.expectedSQL), normalizeSQL(out.SQL))
			}
			if c.expectedArgs != nil {
				require.Len(t, out.Args, len(c.expectedArgs))
				for i, want := range c.expectedArgs {
					// time.Time values can carry semantically-equal instants
					// in different *Location representations (UTC vs Local vs
					// FixedZone). Compare via .Equal() instead of DeepEqual.
					if wantT, ok := want.(time.Time); ok {
						gotT, ok := out.Args[i].(time.Time)
						require.True(t, ok, "arg[%d]: want time.Time, got %T", i, out.Args[i])
						assert.True(t, wantT.Equal(gotT), "arg[%d]: want %s, got %s", i, wantT, gotT)
						continue
					}
					assert.Equal(t, want, out.Args[i], "arg[%d]", i)
				}
			}
		})
	}
}
|
||||
// TestCompile_Empty: an empty DSL query compiles to no WHERE fragment at all.
func TestCompile_Empty(t *testing.T) {
	runCompileCases(t, []compileCase{
		{subtestName: "empty query yields nil", dslQueryToCompile: "", nilExpected: true},
	})
}

// TestCompile_Name covers the JSON-extracted `name` key: equality with both
// quote styles, CONTAINS (wildcard-wrapped LIKE), case-insensitive ILIKE,
// and escaping of LIKE metacharacters in user input.
func TestCompile_Name(t *testing.T) {
	runCompileCases(t, []compileCase{
		{
			subtestName:       "name =",
			dslQueryToCompile: `name = 'overview'`,
			expectedSQL:       `json_extract("d"."data", '$.data.display.name') = ?`,
			expectedArgs:      []any{"overview"},
		},
		{
			// QUOTED_TEXT in the grammar covers both '…' and "…" — visitor
			// strips whichever quote pair surrounds the value.
			subtestName:       "name = with double-quoted value",
			dslQueryToCompile: `name = "something"`,
			expectedSQL:       `json_extract("d"."data", '$.data.display.name') = ?`,
			expectedArgs:      []any{"something"},
		},
		{
			subtestName:       "name CONTAINS",
			dslQueryToCompile: `name CONTAINS 'overview'`,
			expectedSQL:       `json_extract("d"."data", '$.data.display.name') LIKE ?`,
			expectedArgs:      []any{"%overview%"},
		},
		{
			subtestName:       "name ILIKE — emitted as LOWER(col) LIKE LOWER(?) for dialect parity",
			dslQueryToCompile: `name ILIKE 'Prod%'`,
			expectedSQL:       `lower(json_extract("d"."data", '$.data.display.name')) LIKE LOWER(?)`,
			expectedArgs:      []any{"Prod%"},
		},
		{
			subtestName:       "CONTAINS escapes % in user input",
			dslQueryToCompile: `name CONTAINS '50%'`,
			expectedSQL:       `json_extract("d"."data", '$.data.display.name') LIKE ?`,
			expectedArgs:      []any{`%50\%%`},
		},
	})
}

// TestCompile_CreatedByLocked covers the plain-column keys: created_by
// (text) and locked (boolean).
func TestCompile_CreatedByLocked(t *testing.T) {
	runCompileCases(t, []compileCase{
		{
			subtestName:       "created_by LIKE",
			dslQueryToCompile: `created_by LIKE '%@signoz.io'`,
			expectedSQL:       `d.created_by LIKE ?`,
			expectedArgs:      []any{"%@signoz.io"},
		},
		{
			subtestName:       "locked = true",
			dslQueryToCompile: `locked = true`,
			expectedSQL:       `d.locked = ?`,
			expectedArgs:      []any{true},
		},
	})
}
||||
// TestCompile_Public: `public` is a virtual key — it compiles to IS (NOT)
// NULL checks on the LEFT-joined public_dashboard alias, never a placeholder.
func TestCompile_Public(t *testing.T) {
	runCompileCases(t, []compileCase{
		{subtestName: "public = true", dslQueryToCompile: `public = true`, expectedSQL: `pd.id IS NOT NULL`},
		{subtestName: "public = false", dslQueryToCompile: `public = false`, expectedSQL: `pd.id IS NULL`},
		{subtestName: "public != true", dslQueryToCompile: `public != true`, expectedSQL: `pd.id IS NULL`},
	})
}

// TestCompile_Timestamps covers RFC3339 parsing on created_at/updated_at,
// including BETWEEN and non-UTC offsets.
func TestCompile_Timestamps(t *testing.T) {
	ist := time.FixedZone("+05:30", 5*60*60+30*60)
	runCompileCases(t, []compileCase{
		{
			subtestName:       "created_at >= RFC3339",
			dslQueryToCompile: `created_at >= '2026-03-10T00:00:00Z'`,
			expectedSQL:       `d.created_at >= ?`,
			expectedArgs:      []any{time.Date(2026, 3, 10, 0, 0, 0, 0, time.UTC)},
		},
		{
			subtestName:       "updated_at BETWEEN",
			dslQueryToCompile: `updated_at BETWEEN '2026-03-10T00:00:00Z' AND '2026-03-20T00:00:00Z'`,
			expectedSQL:       `d.updated_at BETWEEN ? AND ?`,
			expectedArgs: []any{
				time.Date(2026, 3, 10, 0, 0, 0, 0, time.UTC),
				time.Date(2026, 3, 20, 0, 0, 0, 0, time.UTC),
			},
		},
		{
			subtestName:       "created_at >= IST timestamp",
			dslQueryToCompile: `created_at >= '2026-03-10T05:30:00+05:30'`,
			expectedSQL:       `d.created_at >= ?`,
			expectedArgs:      []any{time.Date(2026, 3, 10, 5, 30, 0, 0, ist)},
		},
	})
}

// Tag operators wrap each predicate in EXISTS / NOT EXISTS.
func TestCompile_Tag(t *testing.T) {
	runCompileCases(t, []compileCase{
		{
			subtestName:       "tag = wraps in EXISTS",
			dslQueryToCompile: `tag = 'database'`,
			expectedSQL: `
				EXISTS (
					SELECT 1 FROM tag_relations tr
					JOIN tag t ON t.id = tr.tag_id
					WHERE tr.entity_id = d.id AND t.name = ?
				)`,
			expectedArgs: []any{"database"},
		},
		{
			subtestName:       "tag != wraps in NOT EXISTS with positive inner",
			dslQueryToCompile: `tag != 'database'`,
			expectedSQL: `
				NOT EXISTS (
					SELECT 1 FROM tag_relations tr
					JOIN tag t ON t.id = tr.tag_id
					WHERE tr.entity_id = d.id AND t.name = ?
				)`,
			expectedArgs: []any{"database"},
		},
		{
			subtestName:       "tag IN — inner is single placeholder list",
			dslQueryToCompile: `tag IN ['team/pulse', 'team/events']`,
			expectedSQL: `
				EXISTS (
					SELECT 1 FROM tag_relations tr
					JOIN tag t ON t.id = tr.tag_id
					WHERE tr.entity_id = d.id AND t.name IN (?, ?)
				)`,
			expectedArgs: []any{"team/pulse", "team/events"},
		},
		{
			subtestName:       "tag NOT IN",
			dslQueryToCompile: `tag NOT IN ['database/redis', 'database/mongo']`,
			expectedSQL: `
				NOT EXISTS (
					SELECT 1 FROM tag_relations tr
					JOIN tag t ON t.id = tr.tag_id
					WHERE tr.entity_id = d.id AND t.name IN (?, ?)
				)`,
			expectedArgs: []any{"database/redis", "database/mongo"},
		},
		{
			subtestName:       "tag LIKE — hierarchy match",
			dslQueryToCompile: `tag LIKE 'prod/%'`,
			expectedSQL: `
				EXISTS (
					SELECT 1 FROM tag_relations tr
					JOIN tag t ON t.id = tr.tag_id
					WHERE tr.entity_id = d.id AND t.name LIKE ?
				)`,
			expectedArgs: []any{"prod/%"},
		},
		{
			subtestName:       "tag NOT LIKE",
			dslQueryToCompile: `tag NOT LIKE 'tests/%'`,
			expectedSQL: `
				NOT EXISTS (
					SELECT 1 FROM tag_relations tr
					JOIN tag t ON t.id = tr.tag_id
					WHERE tr.entity_id = d.id AND t.name LIKE ?
				)`,
			expectedArgs: []any{"tests/%"},
		},
		{
			subtestName:       "tag EXISTS — bare predicate, no tag table join needed",
			dslQueryToCompile: `tag EXISTS`,
			expectedSQL:       `EXISTS (SELECT 1 FROM tag_relations tr WHERE tr.entity_id = d.id)`,
			expectedArgs:      []any{},
		},
		{
			subtestName:       "tag NOT EXISTS",
			dslQueryToCompile: `tag NOT EXISTS`,
			expectedSQL:       `NOT EXISTS (SELECT 1 FROM tag_relations tr WHERE tr.entity_id = d.id)`,
			expectedArgs:      []any{},
		},
	})
}

// TestCompile_BooleanComposition covers AND/OR chains and explicit
// parenthesized grouping; args stay flat and positional.
func TestCompile_BooleanComposition(t *testing.T) {
	runCompileCases(t, []compileCase{
		{
			subtestName:       "AND chain — flat arg list",
			dslQueryToCompile: `locked = true AND public = true`,
			expectedSQL:       `d.locked = ? AND pd.id IS NOT NULL`,
			expectedArgs:      []any{true},
		},
		{
			subtestName:       "OR chain",
			dslQueryToCompile: `locked = true OR public = true`,
			expectedSQL:       `d.locked = ? OR pd.id IS NOT NULL`,
			expectedArgs:      []any{true},
		},
		{
			subtestName:       "parens preserve precedence",
			dslQueryToCompile: `(locked = true OR public = true) AND created_by = 'a@b.com'`,
			expectedSQL:       `(d.locked = ? OR pd.id IS NOT NULL) AND d.created_by = ?`,
			expectedArgs:      []any{true, "a@b.com"},
		},
	})
}
||||
// Distinct from operator-suffix negation (NOT IN / NOT LIKE / NOT EXISTS).
// Driven by the unaryExpression rule (`NOT? primary`), so NOT binds to
// exactly one primary and only widens via parens.
func TestCompile_NOT(t *testing.T) {
	runCompileCases(t, []compileCase{
		{
			subtestName:       "NOT on a single comparison",
			dslQueryToCompile: `NOT name = 'foo'`,
			expectedSQL:       `NOT (json_extract("d"."data", '$.data.display.name') = ?)`,
			expectedArgs:      []any{"foo"},
		},
		{
			subtestName:       "NOT binds tightly to its primary in an AND chain",
			dslQueryToCompile: `NOT name = 'foo' AND created_by = 'alice'`,
			expectedSQL:       `NOT (json_extract("d"."data", '$.data.display.name') = ?) AND d.created_by = ?`,
			expectedArgs:      []any{"foo", "alice"},
		},
		{
			subtestName:       "NOT applied to the second term in an AND chain",
			dslQueryToCompile: `locked = true AND NOT name = 'foo'`,
			expectedSQL:       `d.locked = ? AND NOT (json_extract("d"."data", '$.data.display.name') = ?)`,
			expectedArgs:      []any{true, "foo"},
		},
		{
			subtestName:       "NOT around a parenthesized OR",
			dslQueryToCompile: `NOT (locked = true OR public = true)`,
			expectedSQL:       `NOT ((d.locked = ? OR pd.id IS NOT NULL))`,
			expectedArgs:      []any{true},
		},
		{
			subtestName:       "double NOT via parens",
			dslQueryToCompile: `NOT (NOT name = 'foo')`,
			expectedSQL:       `NOT ((NOT (json_extract("d"."data", '$.data.display.name') = ?)))`,
			expectedArgs:      []any{"foo"},
		},
		{
			subtestName:       "NOT on a tag equality",
			dslQueryToCompile: `NOT tag = 'database'`,
			expectedSQL: `
				NOT (
					EXISTS (
						SELECT 1 FROM tag_relations tr
						JOIN tag t ON t.id = tr.tag_id
						WHERE tr.entity_id = d.id AND t.name = ?
					)
				)`,
			expectedArgs: []any{"database"},
		},
		{
			subtestName:       "NOT tag = ... AND name = ...",
			dslQueryToCompile: `NOT tag = 'database' AND name = 'overview'`,
			expectedSQL: `
				NOT (
					EXISTS (
						SELECT 1 FROM tag_relations tr
						JOIN tag t ON t.id = tr.tag_id
						WHERE tr.entity_id = d.id AND t.name = ?
					)
				)
				AND json_extract("d"."data", '$.data.display.name') = ?`,
			expectedArgs: []any{"database", "overview"},
		},
	})
}

// TestCompile_ComplexExamples exercises realistic multi-term queries mixing
// JSON columns, plain columns, and tag EXISTS subqueries.
func TestCompile_ComplexExamples(t *testing.T) {
	runCompileCases(t, []compileCase{
		{
			subtestName:       "name CONTAINS + tag LIKE + created_by + tag",
			dslQueryToCompile: `name CONTAINS 'overview' AND tag LIKE 'prod/%' AND created_by = 'naman.verma@signoz.io' AND tag = 'database'`,
			expectedSQL: `
				json_extract("d"."data", '$.data.display.name') LIKE ?
				AND EXISTS (
					SELECT 1 FROM tag_relations tr
					JOIN tag t ON t.id = tr.tag_id
					WHERE tr.entity_id = d.id AND t.name LIKE ?
				)
				AND d.created_by = ?
				AND EXISTS (
					SELECT 1 FROM tag_relations tr
					JOIN tag t ON t.id = tr.tag_id
					WHERE tr.entity_id = d.id AND t.name = ?
				)`,
			expectedArgs: []any{"%overview%", "prod/%", "naman.verma@signoz.io", "database"},
		},
		{
			subtestName:       "tag IN AND tag =",
			dslQueryToCompile: `tag IN ['team/pulse', 'team/events'] AND tag = 'database'`,
			expectedSQL: `
				EXISTS (
					SELECT 1 FROM tag_relations tr
					JOIN tag t ON t.id = tr.tag_id
					WHERE tr.entity_id = d.id AND t.name IN (?, ?)
				)
				AND EXISTS (
					SELECT 1 FROM tag_relations tr
					JOIN tag t ON t.id = tr.tag_id
					WHERE tr.entity_id = d.id AND t.name = ?
				)`,
			expectedArgs: []any{"team/pulse", "team/events", "database"},
		},
		{
			subtestName:       "nested OR / AND with parens",
			dslQueryToCompile: `(tag IN ['sql', 'redis', 'mongo'] OR name LIKE '%database%') AND (tag = 'team/pulse' OR name LIKE '%pulse%')`,
			expectedSQL: `
				(
					EXISTS (
						SELECT 1 FROM tag_relations tr
						JOIN tag t ON t.id = tr.tag_id
						WHERE tr.entity_id = d.id AND t.name IN (?, ?, ?)
					)
					OR json_extract("d"."data", '$.data.display.name') LIKE ?
				)
				AND (
					EXISTS (
						SELECT 1 FROM tag_relations tr
						JOIN tag t ON t.id = tr.tag_id
						WHERE tr.entity_id = d.id AND t.name = ?
					)
					OR json_extract("d"."data", '$.data.display.name') LIKE ?
				)`,
			expectedArgs: []any{"sql", "redis", "mongo", "%database%", "team/pulse", "%pulse%"},
		},
	})
}

// TestCompile_Rejections covers the validation paths: unknown keys, operator
// allowlist violations, type mismatches, bad timestamps, unimplemented
// REGEXP, and grammar syntax errors.
func TestCompile_Rejections(t *testing.T) {
	runCompileCases(t, []compileCase{
		{
			subtestName:              "rejects unknown key",
			dslQueryToCompile:        `foo = 'bar'`,
			expectedErrShouldContain: "unknown key",
		},
		{
			subtestName:              "rejects op outside per-key allowlist",
			dslQueryToCompile:        `name BETWEEN 'a' AND 'z'`,
			expectedErrShouldContain: "operator",
		},
		{
			subtestName:              "rejects non-bool on locked",
			dslQueryToCompile:        `locked = 'yes'`,
			expectedErrShouldContain: "boolean",
		},
		{
			subtestName:              "rejects non-RFC3339 timestamp",
			dslQueryToCompile:        `created_at >= 'not-a-date'`,
			expectedErrShouldContain: "RFC3339",
		},
		{
			subtestName:              "rejects REGEXP — not yet supported",
			dslQueryToCompile:        `name REGEXP '.*'`,
			expectedErrShouldContain: "REGEXP",
		},
		{
			subtestName:              "rejects syntax error from grammar",
			dslQueryToCompile:        `name = `,
			expectedErrShouldContain: "syntax",
		},
	})
}
|
||||
// formatter builds a sqlite-dialect SQLFormatter via the sqlmock-backed test
// provider — no real database connection is opened.
func formatter(t *testing.T) sqlstore.SQLFormatter {
	t.Helper()
	p := sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual)
	return p.Formatter()
}
||||
// normalizeSQL collapses every whitespace run to a single space and tightens
// the space just inside parentheses, so the multi-line expected SQL in the
// test tables compares equal to the compiler's single-line output.
func normalizeSQL(s string) string {
	collapsed := strings.Join(strings.Fields(s), " ")
	collapsed = strings.ReplaceAll(collapsed, "( ", "(")
	return strings.ReplaceAll(collapsed, " )", ")")
}
||||
579
pkg/types/dashboardtypes/listfilter/visitor.go
Normal file
579
pkg/types/dashboardtypes/listfilter/visitor.go
Normal file
@@ -0,0 +1,579 @@
|
||||
package listfilter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/parser/filterquery"
|
||||
grammar "github.com/SigNoz/signoz/pkg/parser/filterquery/grammar"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
qbtypesv5 "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/antlr4-go/antlr/v4"
|
||||
)
|
||||
|
||||
// fragment is one composable WHERE fragment. sql uses `?` placeholders;
// args lines up positionally with the placeholders.
type fragment struct {
	sql  string
	args []any
}

// newFragment bundles a SQL snippet with its positional arguments.
func newFragment(sql string, args ...any) *fragment {
	return &fragment{sql: sql, args: args}
}
|
||||
// visitor walks the parsed filter-query tree and emits SQL fragments.
// Semantic problems (unknown key, disallowed operator, bad value) are
// collected on errors rather than aborting the walk.
type visitor struct {
	grammar.BaseFilterQueryVisitor
	formatter sqlstore.SQLFormatter
	errors    []string
}

// newVisitor returns a visitor that renders dialect-specific expressions
// through the given formatter.
func newVisitor(formatter sqlstore.SQLFormatter) *visitor {
	return &visitor{
		formatter: formatter,
	}
}
|
||||
// compile parses query and walks the resulting tree.
// Emitted WHERE fragment uses aliases `d` (dashboard) and `pd` (public_dashboard).
// Grammar-level syntax errors are returned directly; semantic errors
// accumulate on v.errors for the caller to inspect.
func (v *visitor) compile(query string) (*fragment, []string) {
	tree, _, collector := filterquery.Parse(query)
	if len(collector.Errors) > 0 {
		return nil, collector.Errors
	}
	frag, _ := v.visit(tree).(*fragment)
	return frag, nil
}
|
||||
func (v *visitor) visit(tree antlr.ParseTree) any {
|
||||
if tree == nil {
|
||||
return nil
|
||||
}
|
||||
return tree.Accept(v)
|
||||
}
|
||||
|
||||
// ════════════════════════════════════════════════════════════════════════
// methods from grammar.BaseFilterQueryVisitor that are overridden
// ════════════════════════════════════════════════════════════════════════

// VisitQuery unwraps the top-level rule to its single expression.
func (v *visitor) VisitQuery(ctx *grammar.QueryContext) any {
	return v.visit(ctx.Expression())
}
|
||||
// VisitExpression unwraps to the OR-expression it contains.
func (v *visitor) VisitExpression(ctx *grammar.ExpressionContext) any {
	return v.visit(ctx.OrExpression())
}
|
||||
func (v *visitor) VisitOrExpression(ctx *grammar.OrExpressionContext) any {
|
||||
parts := ctx.AllAndExpression()
|
||||
frags := make([]*fragment, 0, len(parts))
|
||||
for _, p := range parts {
|
||||
if f, ok := v.visit(p).(*fragment); ok && f != nil {
|
||||
frags = append(frags, f)
|
||||
}
|
||||
}
|
||||
return joinFragments(frags, "OR")
|
||||
}
|
||||
|
||||
func (v *visitor) VisitAndExpression(ctx *grammar.AndExpressionContext) any {
|
||||
parts := ctx.AllUnaryExpression()
|
||||
frags := make([]*fragment, 0, len(parts))
|
||||
for _, p := range parts {
|
||||
if f, ok := v.visit(p).(*fragment); ok && f != nil {
|
||||
frags = append(frags, f)
|
||||
}
|
||||
}
|
||||
return joinFragments(frags, "AND")
|
||||
}
|
||||
|
||||
func (v *visitor) VisitUnaryExpression(ctx *grammar.UnaryExpressionContext) any {
|
||||
f, _ := v.visit(ctx.Primary()).(*fragment)
|
||||
if f == nil {
|
||||
return nil
|
||||
}
|
||||
if ctx.NOT() != nil {
|
||||
return newFragment("NOT ("+f.sql+")", f.args...)
|
||||
}
|
||||
return f
|
||||
}
|
||||
|
||||
// VisitPrimary handles the allowed primary shapes: a parenthesized
// OR-expression (grouping preserved in the emitted SQL) or a single
// comparison. Anything else is rejected.
func (v *visitor) VisitPrimary(ctx *grammar.PrimaryContext) any {
	if ctx.OrExpression() != nil {
		f, _ := v.visit(ctx.OrExpression()).(*fragment)
		if f == nil {
			return nil
		}
		return newFragment("("+f.sql+")", f.args...)
	}
	if ctx.Comparison() != nil {
		return v.visit(ctx.Comparison())
	}
	// Bare keys, values, full text, and function calls are not part of the
	// dashboard list DSL.
	v.addErr("unsupported expression %q — every term must be of the form `key OP value`", ctx.GetText())
	return nil
}
|
||||
// VisitComparison dispatches a single `key OP value` term to the per-key
// emitter. It also enforces the operator allowlist.
func (v *visitor) VisitComparison(ctx *grammar.ComparisonContext) any {
	key, ok := v.parseKey(ctx)
	if !ok {
		return nil
	}

	op, ok := v.opFromContext(ctx)
	if !ok {
		return nil
	}

	if _, allowed := allowedOps[key][op]; !allowed {
		v.addErr("operator %s is not allowed for key %q", opName(op), key)
		return nil
	}

	switch key {
	case KeyName:
		return v.emitJSONStringComparison(ctx, op, "$.data.display.name")
	case KeyDescription:
		return v.emitJSONStringComparison(ctx, op, "$.data.display.description")
	case KeyCreatedAt:
		return v.emitTimestampComparison(ctx, op, "d.created_at")
	case KeyUpdatedAt:
		return v.emitTimestampComparison(ctx, op, "d.updated_at")
	case KeyCreatedBy:
		return v.emitStringComparison(ctx, op, "d.created_by")
	case KeyLocked:
		return v.emitBoolComparison(ctx, op, "d.locked")
	case KeyPublic:
		return v.emitPublicComparison(ctx, op)
	case KeyTag:
		return v.emitTagComparison(ctx, op)
	}
	// Defensive: parseKey already rejected keys outside allowedOps, so every
	// valid key reaches one of the cases above.
	v.addErr("unhandled key %q", key)
	return nil
}
|
||||
// parseKey lowercases and validates the comparison's key against the
// allowedOps allowlist, recording an error for unknown keys.
func (v *visitor) parseKey(ctx *grammar.ComparisonContext) (Key, bool) {
	keyText := strings.ToLower(strings.TrimSpace(ctx.Key().GetText()))
	k := Key(keyText)
	if _, ok := allowedOps[k]; !ok {
		v.addErr("unknown key %q — allowed: name, description, created_at, updated_at, created_by, locked, public, tag", keyText)
		return "", false
	}
	return k, true
}
|
||||
// opFromContext maps the grammar's operator tokens onto a FilterOperator,
// folding a preceding NOT token into the negated operator variant where the
// grammar expresses negation that way (NOT BETWEEN, NOT LIKE, NOT EXISTS, …).
func (v *visitor) opFromContext(ctx *grammar.ComparisonContext) (qbtypesv5.FilterOperator, bool) {
	switch {
	case ctx.EQUALS() != nil:
		return qbtypesv5.FilterOperatorEqual, true
	case ctx.NOT_EQUALS() != nil, ctx.NEQ() != nil:
		return qbtypesv5.FilterOperatorNotEqual, true
	case ctx.LT() != nil:
		return qbtypesv5.FilterOperatorLessThan, true
	case ctx.LE() != nil:
		return qbtypesv5.FilterOperatorLessThanOrEq, true
	case ctx.GT() != nil:
		return qbtypesv5.FilterOperatorGreaterThan, true
	case ctx.GE() != nil:
		return qbtypesv5.FilterOperatorGreaterThanOrEq, true
	case ctx.BETWEEN() != nil:
		if ctx.NOT() != nil {
			return qbtypesv5.FilterOperatorNotBetween, true
		}
		return qbtypesv5.FilterOperatorBetween, true
	case ctx.LIKE() != nil:
		if ctx.NOT() != nil {
			return qbtypesv5.FilterOperatorNotLike, true
		}
		return qbtypesv5.FilterOperatorLike, true
	case ctx.ILIKE() != nil:
		if ctx.NOT() != nil {
			return qbtypesv5.FilterOperatorNotILike, true
		}
		return qbtypesv5.FilterOperatorILike, true
	case ctx.CONTAINS() != nil:
		if ctx.NOT() != nil {
			return qbtypesv5.FilterOperatorNotContains, true
		}
		return qbtypesv5.FilterOperatorContains, true
	case ctx.REGEXP() != nil:
		if ctx.NOT() != nil {
			return qbtypesv5.FilterOperatorNotRegexp, true
		}
		return qbtypesv5.FilterOperatorRegexp, true
	case ctx.InClause() != nil:
		return qbtypesv5.FilterOperatorIn, true
	case ctx.NotInClause() != nil:
		return qbtypesv5.FilterOperatorNotIn, true
	case ctx.EXISTS() != nil:
		if ctx.NOT() != nil {
			return qbtypesv5.FilterOperatorNotExists, true
		}
		return qbtypesv5.FilterOperatorExists, true
	}
	v.addErr("could not determine operator in expression %q", ctx.GetText())
	return qbtypesv5.FilterOperatorUnknown, false
}
|
||||
// ─── per-key emitters ────────────────────────────────────────────────────────
|
||||
|
||||
func (v *visitor) emitJSONStringComparison(ctx *grammar.ComparisonContext, op qbtypesv5.FilterOperator, jsonPath string) *fragment {
|
||||
colExpr := string(v.formatter.JSONExtractString("d.data", jsonPath))
|
||||
return v.emitStringOp(ctx, op, colExpr, string(KeyName))
|
||||
}
|
||||
|
||||
// emitStringComparison emits a comparison against a plain text column
// (currently only d.created_by).
func (v *visitor) emitStringComparison(ctx *grammar.ComparisonContext, op qbtypesv5.FilterOperator, colExpr string) *fragment {
	return v.emitStringOp(ctx, op, colExpr, string(KeyCreatedBy))
}
|
||||
// emitStringOp covers all the operators the spec allows on text-shaped keys
// (name, description, created_by). Tag uses a separate emitter that wraps each
// produced fragment in an EXISTS subquery. keyForErr is only used in error
// messages. Returns nil (after recording an error) on any invalid value or
// unsupported operator.
func (v *visitor) emitStringOp(ctx *grammar.ComparisonContext, op qbtypesv5.FilterOperator, colExpr, keyForErr string) *fragment {
	switch op {
	case qbtypesv5.FilterOperatorEqual, qbtypesv5.FilterOperatorNotEqual,
		qbtypesv5.FilterOperatorLike, qbtypesv5.FilterOperatorNotLike:
		val, ok := v.singleString(ctx, keyForErr)
		if !ok {
			return nil
		}
		return newFragment(colExpr+" "+opName(op)+" ?", val)
	case qbtypesv5.FilterOperatorILike, qbtypesv5.FilterOperatorNotILike:
		val, ok := v.singleString(ctx, keyForErr)
		if !ok {
			return nil
		}
		// SQLite has no ILIKE keyword and Postgres LIKE is case-sensitive — emit
		// LOWER(col) LIKE LOWER(?) so behavior is identical on both dialects.
		lowerCol := string(v.formatter.LowerExpression(colExpr))
		return newFragment(lowerCol+" "+opName(iLikeToLike(op))+" LOWER(?)", val)
	case qbtypesv5.FilterOperatorContains, qbtypesv5.FilterOperatorNotContains:
		val, ok := v.singleString(ctx, keyForErr)
		if !ok {
			return nil
		}
		// CONTAINS is substring match: escape LIKE metacharacters in the
		// user's value, then wrap it in %...%.
		return newFragment(colExpr+" "+opName(containsToLike(op))+" ?", "%"+escapeLike(val)+"%")
	case qbtypesv5.FilterOperatorRegexp, qbtypesv5.FilterOperatorNotRegexp:
		v.addErr("REGEXP filtering on %q is not yet supported", keyForErr)
		return nil
	case qbtypesv5.FilterOperatorIn, qbtypesv5.FilterOperatorNotIn:
		vals, ok := v.stringList(ctx, keyForErr)
		if !ok {
			return nil
		}
		return inFragment(colExpr, op, vals)
	}
	v.addErr("operator %s on %q is not implemented", opName(op), keyForErr)
	return nil
}
||||
|
||||
// emitTimestampComparison renders comparisons on the created_at/updated_at
// columns: simple ordering operators take one timestamp value, (NOT) BETWEEN
// takes two. Value parsing is delegated to singleTimestamp/twoTimestamps.
func (v *visitor) emitTimestampComparison(ctx *grammar.ComparisonContext, op qbtypesv5.FilterOperator, colExpr string) *fragment {
	switch op {
	case qbtypesv5.FilterOperatorEqual, qbtypesv5.FilterOperatorNotEqual,
		qbtypesv5.FilterOperatorLessThan, qbtypesv5.FilterOperatorLessThanOrEq,
		qbtypesv5.FilterOperatorGreaterThan, qbtypesv5.FilterOperatorGreaterThanOrEq:
		t, ok := v.singleTimestamp(ctx)
		if !ok {
			return nil
		}
		return newFragment(colExpr+" "+opName(op)+" ?", t)
	case qbtypesv5.FilterOperatorBetween, qbtypesv5.FilterOperatorNotBetween:
		ts, ok := v.twoTimestamps(ctx)
		if !ok {
			return nil
		}
		return newFragment(colExpr+" "+opName(op)+" ? AND ?", ts[0], ts[1])
	}
	v.addErr("operator %s on timestamp is not implemented", opName(op))
	return nil
}
|
||||
func (v *visitor) emitBoolComparison(ctx *grammar.ComparisonContext, op qbtypesv5.FilterOperator, colExpr string) *fragment {
|
||||
b, ok := v.singleBool(ctx)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return newFragment(colExpr+" "+opName(op)+" ?", b)
|
||||
}
|
||||
|
||||
// emitPublicComparison renders `public = true|false` against the LEFT-joined
|
||||
// public_dashboard alias `pd`. The spec says public is a virtual column whose
|
||||
// truthiness is the existence of a row in public_dashboard.
|
||||
func (v *visitor) emitPublicComparison(ctx *grammar.ComparisonContext, op qbtypesv5.FilterOperator) *fragment {
|
||||
b, ok := v.singleBool(ctx)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
want := b
|
||||
if op == qbtypesv5.FilterOperatorNotEqual {
|
||||
want = !b
|
||||
}
|
||||
if want {
|
||||
return newFragment("pd.id IS NOT NULL")
|
||||
}
|
||||
return newFragment("pd.id IS NULL")
|
||||
}
|
||||
|
||||
const tagSubqueryPrefix = "SELECT 1 FROM tag_relations tr JOIN tag t ON t.id = tr.tag_id WHERE tr.entity_id = d.id"
|
||||
|
||||
// emitTagComparison wraps the inner predicate in EXISTS (or NOT EXISTS for the
|
||||
// negated operators). The inner predicate compares against `t.name` so that
|
||||
// LIKE/ILIKE patterns the user types match the stored display casing.
|
||||
func (v *visitor) emitTagComparison(ctx *grammar.ComparisonContext, op qbtypesv5.FilterOperator) *fragment {
|
||||
if op == qbtypesv5.FilterOperatorExists {
|
||||
return newFragment("EXISTS (SELECT 1 FROM tag_relations tr WHERE tr.entity_id = d.id)")
|
||||
}
|
||||
if op == qbtypesv5.FilterOperatorNotExists {
|
||||
return newFragment("NOT EXISTS (SELECT 1 FROM tag_relations tr WHERE tr.entity_id = d.id)")
|
||||
}
|
||||
|
||||
// All other tag operators take the positive form of the inner predicate
|
||||
// and toggle the EXISTS wrapper for negation. Inverse() flips Not<X> → <X>.
|
||||
negated := op.IsNegativeOperator()
|
||||
posOp := op
|
||||
if negated {
|
||||
posOp = op.Inverse()
|
||||
}
|
||||
inner := v.emitStringOp(ctx, posOp, "t.name", string(KeyTag))
|
||||
if inner == nil {
|
||||
return nil
|
||||
}
|
||||
wrapper := "EXISTS"
|
||||
if negated {
|
||||
wrapper = "NOT EXISTS"
|
||||
}
|
||||
return newFragment(wrapper+" ("+tagSubqueryPrefix+" AND "+inner.sql+")", inner.args...)
|
||||
}
|
||||
|
||||
// ─── value extraction helpers ───────────────────────────────────────────────
|
||||
|
||||
func (v *visitor) addErr(format string, args ...any) {
|
||||
v.errors = append(v.errors, fmt.Sprintf(format, args...))
|
||||
}
|
||||
|
||||
func (v *visitor) singleString(ctx *grammar.ComparisonContext, keyForErr string) (string, bool) {
|
||||
values := ctx.AllValue()
|
||||
if len(values) != 1 {
|
||||
v.addErr("expected exactly one value for %q", keyForErr)
|
||||
return "", false
|
||||
}
|
||||
return v.stringValue(values[0], keyForErr)
|
||||
}
|
||||
|
||||
func (v *visitor) singleBool(ctx *grammar.ComparisonContext) (bool, bool) {
|
||||
values := ctx.AllValue()
|
||||
if len(values) != 1 {
|
||||
v.addErr("expected a single boolean (true/false)")
|
||||
return false, false
|
||||
}
|
||||
return v.boolValue(values[0])
|
||||
}
|
||||
|
||||
func (v *visitor) singleTimestamp(ctx *grammar.ComparisonContext) (time.Time, bool) {
|
||||
values := ctx.AllValue()
|
||||
if len(values) != 1 {
|
||||
v.addErr("expected a single RFC3339 timestamp")
|
||||
return time.Time{}, false
|
||||
}
|
||||
return v.timestampValue(values[0])
|
||||
}
|
||||
|
||||
func (v *visitor) twoTimestamps(ctx *grammar.ComparisonContext) ([2]time.Time, bool) {
|
||||
values := ctx.AllValue()
|
||||
if len(values) != 2 {
|
||||
v.addErr("BETWEEN expects two RFC3339 timestamps")
|
||||
return [2]time.Time{}, false
|
||||
}
|
||||
a, ok1 := v.timestampValue(values[0])
|
||||
b, ok2 := v.timestampValue(values[1])
|
||||
if !ok1 || !ok2 {
|
||||
return [2]time.Time{}, false
|
||||
}
|
||||
return [2]time.Time{a, b}, true
|
||||
}
|
||||
|
||||
func (v *visitor) stringList(ctx *grammar.ComparisonContext, keyForErr string) ([]string, bool) {
|
||||
var valuesCtx []grammar.IValueContext
|
||||
switch {
|
||||
case ctx.InClause() != nil:
|
||||
ic := ctx.InClause()
|
||||
if ic.ValueList() != nil {
|
||||
valuesCtx = ic.ValueList().AllValue()
|
||||
} else {
|
||||
valuesCtx = []grammar.IValueContext{ic.Value()}
|
||||
}
|
||||
case ctx.NotInClause() != nil:
|
||||
nc := ctx.NotInClause()
|
||||
if nc.ValueList() != nil {
|
||||
valuesCtx = nc.ValueList().AllValue()
|
||||
} else {
|
||||
valuesCtx = []grammar.IValueContext{nc.Value()}
|
||||
}
|
||||
default:
|
||||
v.addErr("IN clause is missing for %q", keyForErr)
|
||||
return nil, false
|
||||
}
|
||||
if len(valuesCtx) == 0 {
|
||||
v.addErr("IN list for %q is empty", keyForErr)
|
||||
return nil, false
|
||||
}
|
||||
out := make([]string, 0, len(valuesCtx))
|
||||
for _, vc := range valuesCtx {
|
||||
s, ok := v.stringValue(vc, keyForErr)
|
||||
if !ok {
|
||||
return nil, false
|
||||
}
|
||||
out = append(out, s)
|
||||
}
|
||||
return out, true
|
||||
}
|
||||
|
||||
func (v *visitor) stringValue(ctx grammar.IValueContext, keyForErr string) (string, bool) {
|
||||
if ctx.QUOTED_TEXT() != nil {
|
||||
return trimQuotes(ctx.QUOTED_TEXT().GetText()), true
|
||||
}
|
||||
if ctx.KEY() != nil {
|
||||
// Bare tokens are accepted as strings, mirroring the FilterQuery lexer's
|
||||
// treatment of unquoted identifiers on the value side.
|
||||
return ctx.KEY().GetText(), true
|
||||
}
|
||||
v.addErr("expected a string value for %q, got %q", keyForErr, ctx.GetText())
|
||||
return "", false
|
||||
}
|
||||
|
||||
func (v *visitor) boolValue(ctx grammar.IValueContext) (bool, bool) {
|
||||
if ctx.BOOL() == nil {
|
||||
v.addErr("expected a boolean (true/false), got %q", ctx.GetText())
|
||||
return false, false
|
||||
}
|
||||
return strings.EqualFold(ctx.BOOL().GetText(), "true"), true
|
||||
}
|
||||
|
||||
func (v *visitor) timestampValue(ctx grammar.IValueContext) (time.Time, bool) {
|
||||
if ctx.QUOTED_TEXT() == nil {
|
||||
v.addErr("expected an RFC3339 timestamp string, got %q", ctx.GetText())
|
||||
return time.Time{}, false
|
||||
}
|
||||
raw := trimQuotes(ctx.QUOTED_TEXT().GetText())
|
||||
t, err := time.Parse(time.RFC3339, raw)
|
||||
if err != nil {
|
||||
v.addErr("invalid RFC3339 timestamp %q: %s", raw, err.Error())
|
||||
return time.Time{}, false
|
||||
}
|
||||
return t, true
|
||||
}
|
||||
|
||||
// ─── fragment helpers ────────────────────────────────────────────────────────
|
||||
|
||||
func joinFragments(frags []*fragment, conn string) *fragment {
|
||||
if len(frags) == 0 {
|
||||
return nil
|
||||
}
|
||||
if len(frags) == 1 {
|
||||
return frags[0]
|
||||
}
|
||||
parts := make([]string, len(frags))
|
||||
args := make([]any, 0)
|
||||
for i, f := range frags {
|
||||
parts[i] = f.sql
|
||||
args = append(args, f.args...)
|
||||
}
|
||||
return newFragment(strings.Join(parts, " "+conn+" "), args...)
|
||||
}
|
||||
|
||||
func inFragment(colExpr string, op qbtypesv5.FilterOperator, vals []string) *fragment {
|
||||
placeholders := strings.Repeat("?, ", len(vals))
|
||||
placeholders = placeholders[:len(placeholders)-2]
|
||||
args := make([]any, len(vals))
|
||||
for i, s := range vals {
|
||||
args[i] = s
|
||||
}
|
||||
return newFragment(colExpr+" "+opName(op)+" ("+placeholders+")", args...)
|
||||
}
|
||||
|
||||
// opName returns the user-facing spelling of a FilterOperator. For the
|
||||
// operators we emit directly into SQL (=, !=, <, LIKE, IN, BETWEEN, …) the
|
||||
// spelling doubles as the SQL keyword. For the operators we don't emit
|
||||
// directly (ILIKE, CONTAINS, REGEXP, EXISTS, NOT EXISTS) it's only used in
|
||||
// error messages.
|
||||
func opName(op qbtypesv5.FilterOperator) string {
|
||||
switch op {
|
||||
case qbtypesv5.FilterOperatorEqual:
|
||||
return "="
|
||||
case qbtypesv5.FilterOperatorNotEqual:
|
||||
return "!="
|
||||
case qbtypesv5.FilterOperatorLessThan:
|
||||
return "<"
|
||||
case qbtypesv5.FilterOperatorLessThanOrEq:
|
||||
return "<="
|
||||
case qbtypesv5.FilterOperatorGreaterThan:
|
||||
return ">"
|
||||
case qbtypesv5.FilterOperatorGreaterThanOrEq:
|
||||
return ">="
|
||||
case qbtypesv5.FilterOperatorBetween:
|
||||
return "BETWEEN"
|
||||
case qbtypesv5.FilterOperatorNotBetween:
|
||||
return "NOT BETWEEN"
|
||||
case qbtypesv5.FilterOperatorLike:
|
||||
return "LIKE"
|
||||
case qbtypesv5.FilterOperatorNotLike:
|
||||
return "NOT LIKE"
|
||||
case qbtypesv5.FilterOperatorILike:
|
||||
return "ILIKE"
|
||||
case qbtypesv5.FilterOperatorNotILike:
|
||||
return "NOT ILIKE"
|
||||
case qbtypesv5.FilterOperatorContains:
|
||||
return "CONTAINS"
|
||||
case qbtypesv5.FilterOperatorNotContains:
|
||||
return "NOT CONTAINS"
|
||||
case qbtypesv5.FilterOperatorRegexp:
|
||||
return "REGEXP"
|
||||
case qbtypesv5.FilterOperatorNotRegexp:
|
||||
return "NOT REGEXP"
|
||||
case qbtypesv5.FilterOperatorIn:
|
||||
return "IN"
|
||||
case qbtypesv5.FilterOperatorNotIn:
|
||||
return "NOT IN"
|
||||
case qbtypesv5.FilterOperatorExists:
|
||||
return "EXISTS"
|
||||
case qbtypesv5.FilterOperatorNotExists:
|
||||
return "NOT EXISTS"
|
||||
}
|
||||
return "?"
|
||||
}
|
||||
|
||||
// iLikeToLike maps ILIKE → LIKE for the LOWER(col) LIKE LOWER(?) emission.
|
||||
func iLikeToLike(op qbtypesv5.FilterOperator) qbtypesv5.FilterOperator {
|
||||
if op == qbtypesv5.FilterOperatorNotILike {
|
||||
return qbtypesv5.FilterOperatorNotLike
|
||||
}
|
||||
return qbtypesv5.FilterOperatorLike
|
||||
}
|
||||
|
||||
// containsToLike maps CONTAINS → LIKE for the LIKE '%val%' emission.
|
||||
func containsToLike(op qbtypesv5.FilterOperator) qbtypesv5.FilterOperator {
|
||||
if op == qbtypesv5.FilterOperatorNotContains {
|
||||
return qbtypesv5.FilterOperatorNotLike
|
||||
}
|
||||
return qbtypesv5.FilterOperatorLike
|
||||
}
|
||||
|
||||
// escapeLike backslash-escapes the LIKE meta-characters % and _ (and the
// backslash itself) in user input, so a CONTAINS query for `50%` doesn't
// match every value containing `50`.
//
// NOTE(review): this assumes `\` is the LIKE escape character. SQLite has no
// default escape character unless the emitted SQL carries an ESCAPE '\'
// clause — confirm the emitters add one, or the escaping is inert there.
func escapeLike(s string) string {
	var b strings.Builder
	b.Grow(len(s) + 2)
	for i := 0; i < len(s); i++ {
		c := s[i]
		if c == '\\' || c == '%' || c == '_' {
			b.WriteByte('\\')
		}
		b.WriteByte(c)
	}
	return b.String()
}
|
||||
|
||||
// quotedUnescaper decodes the backslash escapes allowed inside quoted text
// (`\\` → `\`, `\'` → `'`) in a single left-to-right pass.
var quotedUnescaper = strings.NewReplacer(`\\`, `\`, `\'`, `'`)

// trimQuotes strips one pair of matching surrounding quotes (single or
// double) from s, then decodes the escapes above.
//
// Decoding uses a single-pass strings.NewReplacer rather than the previous
// two sequential strings.ReplaceAll calls: the sequential form mis-decoded
// inputs like `a\\'b`, because the `\'` produced by the first pass (`\\`→`\`)
// was then wrongly consumed by the second pass, dropping the backslash.
func trimQuotes(s string) string {
	if len(s) >= 2 {
		if (s[0] == '"' && s[len(s)-1] == '"') || (s[0] == '\'' && s[len(s)-1] == '\'') {
			s = s[1 : len(s)-1]
		}
	}
	return quotedUnescaper.Replace(s)
}
|
||||
22
pkg/types/dashboardtypes/pinned_dashboard.go
Normal file
22
pkg/types/dashboardtypes/pinned_dashboard.go
Normal file
@@ -0,0 +1,22 @@
|
||||
package dashboardtypes

import (
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/uptrace/bun"
)

// MaxPinnedDashboardsPerUser caps how many dashboards one user may pin.
const MaxPinnedDashboardsPerUser = 10

// ErrCodePinnedDashboardLimitHit is the error code the store returns when a
// pin would exceed MaxPinnedDashboardsPerUser.
var ErrCodePinnedDashboardLimitHit = errors.MustNewCode("pinned_dashboard_limit_hit")

// PinnedDashboard is a single per-user pin of a dashboard, backed by the
// pinned_dashboard table.
type PinnedDashboard struct {
	bun.BaseModel `bun:"table:pinned_dashboard,alias:pinned_dashboard"`

	// UserID + DashboardID form the composite primary key, so a user can pin
	// a given dashboard at most once.
	UserID      valuer.UUID `bun:"user_id,pk,type:text"`
	DashboardID valuer.UUID `bun:"dashboard_id,pk,type:text"`
	// OrgID denormalizes the dashboard's org for org-scoped queries.
	OrgID valuer.UUID `bun:"org_id,type:text,notnull"`
	// PinnedAt defaults to the DB's current_timestamp at insert time.
	PinnedAt time.Time `bun:"pinned_at,notnull,default:current_timestamp"`
}
|
||||
@@ -44,4 +44,12 @@ type Store interface {
|
||||
UpdateV2(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatedBy string, data StorableDashboardData) error
|
||||
|
||||
LockUnlockV2(ctx context.Context, orgID valuer.UUID, id valuer.UUID, locked bool, updatedBy string) error
|
||||
|
||||
// bool return is hasMore — the store fetches Limit+1 to detect it.
|
||||
ListV2(ctx context.Context, orgID valuer.UUID, userID valuer.UUID, params *ListDashboardsV2Params) ([]*DashboardListRow, bool, error)
|
||||
|
||||
// Returns ErrCodePinnedDashboardLimitHit when the user is at MaxPinnedDashboardsPerUser.
|
||||
PinForUser(ctx context.Context, pd *PinnedDashboard) error
|
||||
|
||||
UnpinForUser(ctx context.Context, userID valuer.UUID, dashboardID valuer.UUID) error
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ from collections.abc import Callable
|
||||
from http import HTTPStatus
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
|
||||
@@ -189,3 +190,99 @@ def test_tag_casing_is_inherited_from_existing_parent(
|
||||
assert second.status_code == HTTPStatus.CREATED
|
||||
second_tags = second.json()["data"]["info"]["tags"]
|
||||
assert second_tags == [{"name": "engineering/US/SF"}]
|
||||
|
||||
|
||||
# ─── list filter DSL ─────────────────────────────────────────────────────────
|
||||
# All fixtures carry a marker tag so each test query can be ANDed with
|
||||
# `tag = '__lst_v2_filter_test'` server-side. That guarantees no leakage
|
||||
# from any other test file/module sharing the session-scoped DB. The marker
|
||||
# rules out a `tag NOT EXISTS` case (a fixture that's no-tags by design
|
||||
# can't also carry the marker) — that operator is covered by the visitor
|
||||
# unit tests in pkg/types/dashboardtypes/listfilter.
|
||||
|
||||
# Marker tag appended to every fixture dashboard so each query below can be
# ANDed with it server-side (see the module comment above).
_LIST_FIXTURE_MARKER_TAG = "__lst_v2_filter_test"

# (name, tags) pairs created once per module by the list_fixture_dashboards
# fixture; the marker tag is added to each dashboard at creation time.
_LIST_FIXTURE_DASHBOARDS = [
    ("lst-overview-prod", ["team/pulse", "prod", "team/frontend"]),
    ("lst-overview-dev", ["team/pulse", "dev"]),
    ("lst-database-prod", ["team/storage", "prod", "database/postgres"]),
    ("lst-database-test", ["team/storage", "test", "database/redis"]),
    ("lst-frontend-team", ["team/pulse", "team/frontend"]),
]
|
||||
|
||||
|
||||
@pytest.fixture(name="list_fixture_dashboards", scope="module")
def list_fixture_dashboards(
    signoz: SigNoz,
    create_user_admin: Operation,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    """Create the module's tagged fixture dashboards as the admin user.

    Each dashboard from _LIST_FIXTURE_DASHBOARDS is created with its own tags
    plus the shared marker tag, so the filter tests can scope their queries
    to this module's data only.
    """
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    for dashboard_name, dashboard_tags in _LIST_FIXTURE_DASHBOARDS:
        payload: dict = {
            "metadata": {"schemaVersion": "v6"},
            "data": {"display": {"name": dashboard_name}},
            "tags": [
                {"name": tag}
                for tag in [*dashboard_tags, _LIST_FIXTURE_MARKER_TAG]
            ],
        }
        created = _post_dashboard(signoz, token, payload)
        assert created.status_code == HTTPStatus.CREATED, created.text
||||
|
||||
|
||||
@pytest.mark.parametrize(
    ("filter_query", "expected_names"),
    [
        (
            "name = 'lst-overview-prod' AND tag = 'team/frontend'",
            {"lst-overview-prod"},
        ),
        # FIX(review): pattern was 'database/%', which prefix-matches nothing —
        # every fixture name starts with 'lst-'.
        (
            "name LIKE 'lst-database-%' AND tag = 'team/storage'",
            {"lst-database-prod", "lst-database-test"},
        ),
        (
            "(name CONTAINS 'overview' OR name CONTAINS 'frontend') AND tag NOT IN ['dev']",
            {"lst-overview-prod", "lst-frontend-team"},
        ),
        (
            "NOT tag = 'prod' AND name CONTAINS 'lst-'",
            {"lst-overview-dev", "lst-database-test", "lst-frontend-team"},
        ),
        (
            "tag = 'team/pulse' AND tag != 'dev'",
            {"lst-overview-prod", "lst-frontend-team"},
        ),
        (
            "tag IN ['dev', 'test'] OR name = 'lst-overview-prod'",
            {"lst-overview-dev", "lst-database-test", "lst-overview-prod"},
        ),
        # FIX(review): no fixture carries both a 'team/pulse' tag and a
        # 'database/%' tag, so this conjunction must match nothing; the old
        # expectation was a copy-paste of the previous case's answer.
        (
            "tag = 'team/pulse' AND tag LIKE 'database/%'",
            set(),
        ),
    ],
)
def test_list_v2_filter_dsl(
    signoz: SigNoz,
    create_user_admin: Operation,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    list_fixture_dashboards: None,  # pylint: disable=unused-argument
    filter_query: str,
    expected_names: set[str],
):
    """Run a filter DSL query against GET /api/v2/dashboards and compare the
    returned dashboard display names with the expected set.

    Each query is ANDed with the module's marker tag so results can never
    leak in from other tests sharing the session-scoped DB.
    """
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    scoped_query = f"({filter_query}) AND tag = '{_LIST_FIXTURE_MARKER_TAG}'"
    response = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/dashboards"),
        params={"query": scoped_query, "limit": 200},
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert response.status_code == HTTPStatus.OK, response.text
    body = response.json()
    assert body["status"] == "success"

    returned_names = {
        d["info"]["data"]["display"]["name"] for d in body["data"]["dashboards"]
    }
    assert returned_names == expected_names
|
||||
|
||||
Reference in New Issue
Block a user